From 19d80fd21e5fe0f9cc28c7a6d6184d30dfe82621 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Tue, 29 Oct 2024 08:42:36 -0500 Subject: [PATCH 01/25] migrate mdx to asciidoc --- serverless/index.asciidoc | 449 +++++++ .../devtools/debug-grok-expressions.asciidoc | 114 ++ .../{ => devtools}/debug-grok-expressions.mdx | 0 .../devtools/debug-painless-scripts.asciidoc | 19 + .../{ => devtools}/debug-painless-scripts.mdx | 0 .../developer-tools-troubleshooting.asciidoc | 299 +++++ .../developer-tools-troubleshooting.mdx | 0 .../devtools/general-developer-tools.asciidoc | 27 + .../general-developer-tools.mdx | 0 serverless/pages/devtools/index.asciidoc | 16 + .../profile-queries-and-aggregations.asciidoc | 333 +++++ .../profile-queries-and-aggregations.mdx | 0 .../run-api-requests-in-the-console.asciidoc | 215 ++++ .../run-api-requests-in-the-console.mdx | 0 .../apis-elasticsearch-conventions.asciidoc | 235 ++++ .../apis-elasticsearch-conventions.mdx | 0 .../elasticsearch/apis-http-apis.asciidoc | 25 + .../{ => elasticsearch}/apis-http-apis.mdx | 0 .../apis-kibana-conventions.asciidoc | 88 ++ .../apis-kibana-conventions.mdx | 0 .../clients-dot-net-getting-started.asciidoc | 144 +++ .../clients-dot-net-getting-started.mdx | 0 .../clients-go-getting-started.asciidoc | 236 ++++ .../clients-go-getting-started.mdx | 0 .../clients-java-getting-started.asciidoc | 195 +++ .../clients-java-getting-started.mdx | 0 .../clients-nodejs-getting-started.asciidoc | 177 +++ .../clients-nodejs-getting-started.mdx | 0 .../clients-php-getting-started.asciidoc | 212 ++++ .../clients-php-getting-started.mdx | 0 .../clients-python-getting-started.asciidoc | 157 +++ .../clients-python-getting-started.mdx | 0 .../clients-ruby-getting-started.asciidoc | 216 ++++ .../clients-ruby-getting-started.mdx | 0 .../pages/elasticsearch/clients.asciidoc | 18 + .../pages/{ => elasticsearch}/clients.mdx | 0 .../elasticsearch-developer-tools.asciidoc | 21 + .../elasticsearch-developer-tools.mdx | 0 
.../explore-your-data-alerting.asciidoc | 159 +++ .../explore-your-data-alerting.mdx | 0 ...lore-your-data-discover-your-data.asciidoc | 203 +++ .../explore-your-data-discover-your-data.mdx | 0 ...re-your-data-the-aggregations-api.asciidoc | 439 +++++++ ...explore-your-data-the-aggregations-api.mdx | 0 ...alize-your-data-create-dashboards.asciidoc | 95 ++ ...-visualize-your-data-create-dashboards.mdx | 0 ...e-your-data-create-visualizations.asciidoc | 384 ++++++ ...ualize-your-data-create-visualizations.mdx | 0 ...ore-your-data-visualize-your-data.asciidoc | 38 + .../explore-your-data-visualize-your-data.mdx | 0 .../elasticsearch/explore-your-data.asciidoc | 14 + .../{ => elasticsearch}/explore-your-data.mdx | 0 .../pages/elasticsearch/get-started.asciidoc | 250 ++++ .../pages/{ => elasticsearch}/get-started.mdx | 0 serverless/pages/elasticsearch/index.asciidoc | 53 + ...your-data-ingest-data-through-api.asciidoc | 140 +++ ...gest-your-data-ingest-data-through-api.mdx | 0 ...t-data-through-integrations-beats.asciidoc | 49 + ...ingest-data-through-integrations-beats.mdx | 0 ...ugh-integrations-connector-client.asciidoc | 303 +++++ ...-through-integrations-connector-client.mdx | 0 ...ata-through-integrations-logstash.asciidoc | 102 ++ ...est-data-through-integrations-logstash.mdx | 0 .../ingest-your-data-upload-file.asciidoc | 47 + .../ingest-your-data-upload-file.mdx | 0 .../elasticsearch/ingest-your-data.asciidoc | 29 + .../{ => elasticsearch}/ingest-your-data.mdx | 0 .../pages/elasticsearch/knn-search.asciidoc | 1095 +++++++++++++++++ .../pages/{ => elasticsearch}/knn-search.mdx | 0 .../pages/elasticsearch/pricing.asciidoc | 56 + .../pages/{ => elasticsearch}/pricing.mdx | 0 .../elasticsearch/search-playground.asciidoc | 17 + .../{ => elasticsearch}/search-playground.mdx | 0 .../search-with-synonyms.asciidoc | 122 ++ .../search-with-synonyms.mdx | 0 ...h-your-data-semantic-search-elser.asciidoc | 392 ++++++ ...search-your-data-semantic-search-elser.mdx | 0 
.../search-your-data-semantic-search.asciidoc | 143 +++ .../search-your-data-semantic-search.mdx | 0 .../search-your-data-the-search-api.asciidoc | 22 + .../search-your-data-the-search-api.mdx | 0 .../elasticsearch/search-your-data.asciidoc | 28 + .../{ => elasticsearch}/search-your-data.mdx | 0 .../serverless-differences.asciidoc | 42 + .../serverless-differences.mdx | 0 .../technical-preview-limitations.asciidoc | 25 + .../technical-preview-limitations.mdx | 0 .../what-is-elasticsearch-serverless.asciidoc | 63 + .../what-is-elasticsearch-serverless.mdx | 0 .../pages/general/cloud-regions.asciidoc | 36 + .../pages/{ => general}/cloud-regions.mdx | 0 serverless/pages/general/index.asciidoc | 31 + ...cess-to-org-from-existing-account.asciidoc | 17 + ...ge-access-to-org-from-existing-account.mdx | 0 .../manage-access-to-org-user-roles.asciidoc | 76 ++ .../manage-access-to-org-user-roles.mdx | 0 .../general/manage-access-to-org.asciidoc | 32 + .../{ => general}/manage-access-to-org.mdx | 0 ...manage-billing-check-subscription.asciidoc | 18 + .../manage-billing-check-subscription.mdx | 0 .../general/manage-billing-history.asciidoc | 15 + .../{ => general}/manage-billing-history.mdx | 0 .../manage-billing-monitor-usage.asciidoc | 28 + .../manage-billing-monitor-usage.mdx | 0 .../manage-billing-pricing-model.asciidoc | 45 + .../manage-billing-pricing-model.mdx | 0 .../manage-billing-stop-project.asciidoc | 17 + .../manage-billing-stop-project.mdx | 0 .../pages/general/manage-billing.asciidoc | 32 + .../pages/{ => general}/manage-billing.mdx | 0 serverless/pages/general/manage-org.asciidoc | 25 + serverless/pages/{ => general}/manage-org.mdx | 0 .../manage-your-project-rest-api.asciidoc | 204 +++ .../manage-your-project-rest-api.mdx | 0 .../general/manage-your-project.asciidoc | 130 ++ .../{ => general}/manage-your-project.mdx | 14 +- .../pages/general/service-status.asciidoc | 27 + .../pages/{ => general}/service-status.mdx | 0 serverless/pages/general/sign-up.asciidoc | 
97 ++ serverless/pages/{ => general}/sign-up.mdx | 0 .../pages/general/user-profile.asciidoc | 56 + .../pages/{ => general}/user-profile.mdx | 0 .../pages/general/what-is-serverless.asciidoc | 137 +++ .../{ => general}/what-is-serverless.mdx | 0 ...re-your-data-ml-nlp-classify-text.asciidoc | 134 ++ ...explore-your-data-ml-nlp-classify-text.mdx | 0 ...ore-your-data-ml-nlp-deploy-model.asciidoc | 81 ++ .../explore-your-data-ml-nlp-deploy-model.mdx | 0 ...data-ml-nlp-deploy-trained-models.asciidoc | 16 + ...your-data-ml-nlp-deploy-trained-models.mdx | 0 .../explore-your-data-ml-nlp-elser.asciidoc | 169 +++ .../explore-your-data-ml-nlp-elser.mdx | 0 ...explore-your-data-ml-nlp-examples.asciidoc | 11 + .../explore-your-data-ml-nlp-examples.mdx | 0 ...ore-your-data-ml-nlp-extract-info.asciidoc | 145 +++ .../explore-your-data-ml-nlp-extract-info.mdx | 0 ...ore-your-data-ml-nlp-import-model.asciidoc | 133 ++ .../explore-your-data-ml-nlp-import-model.mdx | 0 ...xplore-your-data-ml-nlp-inference.asciidoc | 299 +++++ .../explore-your-data-ml-nlp-inference.mdx | 0 ...plore-your-data-ml-nlp-lang-ident.asciidoc | 322 +++++ .../explore-your-data-ml-nlp-lang-ident.mdx | 0 ...xplore-your-data-ml-nlp-model-ref.asciidoc | 281 +++++ .../explore-your-data-ml-nlp-model-ref.mdx | 0 ...lore-your-data-ml-nlp-ner-example.asciidoc | 328 +++++ .../explore-your-data-ml-nlp-ner-example.mdx | 0 ...lore-your-data-ml-nlp-ootb-models.asciidoc | 12 + .../explore-your-data-ml-nlp-ootb-models.mdx | 0 ...e-your-data-ml-nlp-search-compare.asciidoc | 99 ++ ...xplore-your-data-ml-nlp-search-compare.mdx | 0 ...ore-your-data-ml-nlp-select-model.asciidoc | 24 + .../explore-your-data-ml-nlp-select-model.mdx | 0 ...e-your-data-ml-nlp-test-inference.asciidoc | 67 + ...xplore-your-data-ml-nlp-test-inference.mdx | 0 ...ata-ml-nlp-text-embedding-example.asciidoc | 353 ++++++ ...our-data-ml-nlp-text-embedding-example.mdx | 0 .../hidden/explore-your-data-ml-nlp.asciidoc | 33 + .../{ => 
hidden}/explore-your-data-ml-nlp.mdx | 0 .../action-connectors.asciidoc | 341 +++++ .../action-connectors.mdx | 0 .../pages/project-settings/api-keys.asciidoc | 119 ++ .../pages/{ => project-settings}/api-keys.mdx | 0 .../project-settings/custom-roles.asciidoc | 114 ++ .../{ => project-settings}/custom-roles.mdx | 0 .../project-settings/data-views.asciidoc | 178 +++ .../{ => project-settings}/data-views.mdx | 0 .../pages/project-settings/files.asciidoc | 16 + .../pages/{ => project-settings}/files.mdx | 0 .../fleet-and-elastic-agent.asciidoc | 18 + .../fleet-and-elastic-agent.mdx | 0 .../index-management.asciidoc | 281 +++++ .../index-management.mdx | 0 .../ingest-pipelines.asciidoc | 58 + .../ingest-pipelines.mdx | 0 .../project-settings/integrations.asciidoc | 16 + .../{ => project-settings}/integrations.mdx | 0 .../logstash-pipelines.asciidoc | 78 ++ .../logstash-pipelines.mdx | 0 .../machine-learning.asciidoc | 49 + .../machine-learning.mdx | 0 .../maintenance-windows.asciidoc | 72 ++ .../maintenance-windows.mdx | 0 .../pages/project-settings/maps.asciidoc | 98 ++ .../pages/{ => project-settings}/maps.mdx | 0 .../project-and-management-settings.asciidoc | 24 + .../project-and-management-settings.mdx | 0 .../project-settings.asciidoc | 90 ++ .../project-settings.mdx | 0 .../pages/project-settings/reports.asciidoc | 24 + .../pages/{ => project-settings}/reports.mdx | 0 .../pages/project-settings/rules.asciidoc | 176 +++ .../pages/{ => project-settings}/rules.mdx | 0 .../project-settings/saved-objects.asciidoc | 115 ++ .../{ => project-settings}/saved-objects.mdx | 0 .../pages/project-settings/spaces.asciidoc | 67 + .../pages/{ => project-settings}/spaces.mdx | 0 .../pages/project-settings/tags.asciidoc | 76 ++ .../pages/{ => project-settings}/tags.mdx | 0 .../project-settings/transforms.asciidoc | 44 + .../{ => project-settings}/transforms.mdx | 0 serverless/pages/visualize-library.asciidoc | 28 + .../pages/welcome-to-serverless.asciidoc | 88 ++ 
.../deploy-nlp-model-dense-vector.asciidoc | 2 + .../partials/deploy-nlp-model-elser.asciidoc | 2 + .../partials/deploy-nlp-model-widget.asciidoc | 26 + .../field-mappings-dense-vector.asciidoc | 53 + .../partials/field-mappings-elser.asciidoc | 40 + .../partials/field-mappings-widget.asciidoc | 26 + .../generate-embeddings-dense-vector.asciidoc | 38 + .../generate-embeddings-elser.asciidoc | 31 + .../generate-embeddings-widget.asciidoc | 26 + .../hybrid-search-dense-vector.asciidoc | 36 + .../partials/hybrid-search-elser.asciidoc | 41 + .../partials/hybrid-search-widget.asciidoc | 26 + .../partials/minimum-vcus-detail.asciidoc | 11 + .../partials/search-dense-vector.asciidoc | 29 + serverless/partials/search-elser.asciidoc | 24 + serverless/partials/search-widget.asciidoc | 26 + 218 files changed, 13650 insertions(+), 7 deletions(-) create mode 100644 serverless/index.asciidoc create mode 100644 serverless/pages/devtools/debug-grok-expressions.asciidoc rename serverless/pages/{ => devtools}/debug-grok-expressions.mdx (100%) create mode 100644 serverless/pages/devtools/debug-painless-scripts.asciidoc rename serverless/pages/{ => devtools}/debug-painless-scripts.mdx (100%) create mode 100644 serverless/pages/devtools/developer-tools-troubleshooting.asciidoc rename serverless/pages/{ => devtools}/developer-tools-troubleshooting.mdx (100%) create mode 100644 serverless/pages/devtools/general-developer-tools.asciidoc rename serverless/pages/{ => devtools}/general-developer-tools.mdx (100%) create mode 100644 serverless/pages/devtools/index.asciidoc create mode 100644 serverless/pages/devtools/profile-queries-and-aggregations.asciidoc rename serverless/pages/{ => devtools}/profile-queries-and-aggregations.mdx (100%) create mode 100644 serverless/pages/devtools/run-api-requests-in-the-console.asciidoc rename serverless/pages/{ => devtools}/run-api-requests-in-the-console.mdx (100%) create mode 100644 serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc 
rename serverless/pages/{ => elasticsearch}/apis-elasticsearch-conventions.mdx (100%) create mode 100644 serverless/pages/elasticsearch/apis-http-apis.asciidoc rename serverless/pages/{ => elasticsearch}/apis-http-apis.mdx (100%) create mode 100644 serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc rename serverless/pages/{ => elasticsearch}/apis-kibana-conventions.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-dot-net-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-go-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-go-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-java-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-java-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-nodejs-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-php-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-php-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-python-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-python-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc rename serverless/pages/{ => elasticsearch}/clients-ruby-getting-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/clients.asciidoc rename serverless/pages/{ => elasticsearch}/clients.mdx (100%) create mode 100644 serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc rename serverless/pages/{ => elasticsearch}/elasticsearch-developer-tools.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc 
rename serverless/pages/{ => elasticsearch}/explore-your-data-alerting.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc rename serverless/pages/{ => elasticsearch}/explore-your-data-discover-your-data.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc rename serverless/pages/{ => elasticsearch}/explore-your-data-the-aggregations-api.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc rename serverless/pages/{ => elasticsearch}/explore-your-data-visualize-your-data-create-dashboards.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc rename serverless/pages/{ => elasticsearch}/explore-your-data-visualize-your-data-create-visualizations.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc rename serverless/pages/{ => elasticsearch}/explore-your-data-visualize-your-data.mdx (100%) create mode 100644 serverless/pages/elasticsearch/explore-your-data.asciidoc rename serverless/pages/{ => elasticsearch}/explore-your-data.mdx (100%) create mode 100644 serverless/pages/elasticsearch/get-started.asciidoc rename serverless/pages/{ => elasticsearch}/get-started.mdx (100%) create mode 100644 serverless/pages/elasticsearch/index.asciidoc create mode 100644 serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc rename serverless/pages/{ => elasticsearch}/ingest-your-data-ingest-data-through-api.mdx (100%) create mode 100644 serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc rename serverless/pages/{ => elasticsearch}/ingest-your-data-ingest-data-through-integrations-beats.mdx (100%) create mode 100644 
serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc rename serverless/pages/{ => elasticsearch}/ingest-your-data-ingest-data-through-integrations-connector-client.mdx (100%) create mode 100644 serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc rename serverless/pages/{ => elasticsearch}/ingest-your-data-ingest-data-through-integrations-logstash.mdx (100%) create mode 100644 serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc rename serverless/pages/{ => elasticsearch}/ingest-your-data-upload-file.mdx (100%) create mode 100644 serverless/pages/elasticsearch/ingest-your-data.asciidoc rename serverless/pages/{ => elasticsearch}/ingest-your-data.mdx (100%) create mode 100644 serverless/pages/elasticsearch/knn-search.asciidoc rename serverless/pages/{ => elasticsearch}/knn-search.mdx (100%) create mode 100644 serverless/pages/elasticsearch/pricing.asciidoc rename serverless/pages/{ => elasticsearch}/pricing.mdx (100%) create mode 100644 serverless/pages/elasticsearch/search-playground.asciidoc rename serverless/pages/{ => elasticsearch}/search-playground.mdx (100%) create mode 100644 serverless/pages/elasticsearch/search-with-synonyms.asciidoc rename serverless/pages/{ => elasticsearch}/search-with-synonyms.mdx (100%) create mode 100644 serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc rename serverless/pages/{ => elasticsearch}/search-your-data-semantic-search-elser.mdx (100%) create mode 100644 serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc rename serverless/pages/{ => elasticsearch}/search-your-data-semantic-search.mdx (100%) create mode 100644 serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc rename serverless/pages/{ => elasticsearch}/search-your-data-the-search-api.mdx (100%) create mode 100644 serverless/pages/elasticsearch/search-your-data.asciidoc rename 
serverless/pages/{ => elasticsearch}/search-your-data.mdx (100%) create mode 100644 serverless/pages/elasticsearch/serverless-differences.asciidoc rename serverless/pages/{ => elasticsearch}/serverless-differences.mdx (100%) create mode 100644 serverless/pages/elasticsearch/technical-preview-limitations.asciidoc rename serverless/pages/{ => elasticsearch}/technical-preview-limitations.mdx (100%) create mode 100644 serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc rename serverless/pages/{ => elasticsearch}/what-is-elasticsearch-serverless.mdx (100%) create mode 100644 serverless/pages/general/cloud-regions.asciidoc rename serverless/pages/{ => general}/cloud-regions.mdx (100%) create mode 100644 serverless/pages/general/index.asciidoc create mode 100644 serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc rename serverless/pages/{ => general}/manage-access-to-org-from-existing-account.mdx (100%) create mode 100644 serverless/pages/general/manage-access-to-org-user-roles.asciidoc rename serverless/pages/{ => general}/manage-access-to-org-user-roles.mdx (100%) create mode 100644 serverless/pages/general/manage-access-to-org.asciidoc rename serverless/pages/{ => general}/manage-access-to-org.mdx (100%) create mode 100644 serverless/pages/general/manage-billing-check-subscription.asciidoc rename serverless/pages/{ => general}/manage-billing-check-subscription.mdx (100%) create mode 100644 serverless/pages/general/manage-billing-history.asciidoc rename serverless/pages/{ => general}/manage-billing-history.mdx (100%) create mode 100644 serverless/pages/general/manage-billing-monitor-usage.asciidoc rename serverless/pages/{ => general}/manage-billing-monitor-usage.mdx (100%) create mode 100644 serverless/pages/general/manage-billing-pricing-model.asciidoc rename serverless/pages/{ => general}/manage-billing-pricing-model.mdx (100%) create mode 100644 serverless/pages/general/manage-billing-stop-project.asciidoc rename 
serverless/pages/{ => general}/manage-billing-stop-project.mdx (100%) create mode 100644 serverless/pages/general/manage-billing.asciidoc rename serverless/pages/{ => general}/manage-billing.mdx (100%) create mode 100644 serverless/pages/general/manage-org.asciidoc rename serverless/pages/{ => general}/manage-org.mdx (100%) create mode 100644 serverless/pages/general/manage-your-project-rest-api.asciidoc rename serverless/pages/{ => general}/manage-your-project-rest-api.mdx (100%) create mode 100644 serverless/pages/general/manage-your-project.asciidoc rename serverless/pages/{ => general}/manage-your-project.mdx (97%) create mode 100644 serverless/pages/general/service-status.asciidoc rename serverless/pages/{ => general}/service-status.mdx (100%) create mode 100644 serverless/pages/general/sign-up.asciidoc rename serverless/pages/{ => general}/sign-up.mdx (100%) create mode 100644 serverless/pages/general/user-profile.asciidoc rename serverless/pages/{ => general}/user-profile.mdx (100%) create mode 100644 serverless/pages/general/what-is-serverless.asciidoc rename serverless/pages/{ => general}/what-is-serverless.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-classify-text.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-deploy-model.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-deploy-trained-models.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-elser.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-elser.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc rename serverless/pages/{ => 
hidden}/explore-your-data-ml-nlp-examples.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-extract-info.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-import-model.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-import-model.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-inference.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-inference.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-lang-ident.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-model-ref.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-ner-example.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-ootb-models.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-search-compare.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-select-model.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-test-inference.mdx (100%) create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp-text-embedding-example.mdx (100%) 
create mode 100644 serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc rename serverless/pages/{ => hidden}/explore-your-data-ml-nlp.mdx (100%) create mode 100644 serverless/pages/project-settings/action-connectors.asciidoc rename serverless/pages/{ => project-settings}/action-connectors.mdx (100%) create mode 100644 serverless/pages/project-settings/api-keys.asciidoc rename serverless/pages/{ => project-settings}/api-keys.mdx (100%) create mode 100644 serverless/pages/project-settings/custom-roles.asciidoc rename serverless/pages/{ => project-settings}/custom-roles.mdx (100%) create mode 100644 serverless/pages/project-settings/data-views.asciidoc rename serverless/pages/{ => project-settings}/data-views.mdx (100%) create mode 100644 serverless/pages/project-settings/files.asciidoc rename serverless/pages/{ => project-settings}/files.mdx (100%) create mode 100644 serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc rename serverless/pages/{ => project-settings}/fleet-and-elastic-agent.mdx (100%) create mode 100644 serverless/pages/project-settings/index-management.asciidoc rename serverless/pages/{ => project-settings}/index-management.mdx (100%) create mode 100644 serverless/pages/project-settings/ingest-pipelines.asciidoc rename serverless/pages/{ => project-settings}/ingest-pipelines.mdx (100%) create mode 100644 serverless/pages/project-settings/integrations.asciidoc rename serverless/pages/{ => project-settings}/integrations.mdx (100%) create mode 100644 serverless/pages/project-settings/logstash-pipelines.asciidoc rename serverless/pages/{ => project-settings}/logstash-pipelines.mdx (100%) create mode 100644 serverless/pages/project-settings/machine-learning.asciidoc rename serverless/pages/{ => project-settings}/machine-learning.mdx (100%) create mode 100644 serverless/pages/project-settings/maintenance-windows.asciidoc rename serverless/pages/{ => project-settings}/maintenance-windows.mdx (100%) create mode 100644 
serverless/pages/project-settings/maps.asciidoc rename serverless/pages/{ => project-settings}/maps.mdx (100%) create mode 100644 serverless/pages/project-settings/project-and-management-settings.asciidoc rename serverless/pages/{ => project-settings}/project-and-management-settings.mdx (100%) create mode 100644 serverless/pages/project-settings/project-settings.asciidoc rename serverless/pages/{ => project-settings}/project-settings.mdx (100%) create mode 100644 serverless/pages/project-settings/reports.asciidoc rename serverless/pages/{ => project-settings}/reports.mdx (100%) create mode 100644 serverless/pages/project-settings/rules.asciidoc rename serverless/pages/{ => project-settings}/rules.mdx (100%) create mode 100644 serverless/pages/project-settings/saved-objects.asciidoc rename serverless/pages/{ => project-settings}/saved-objects.mdx (100%) create mode 100644 serverless/pages/project-settings/spaces.asciidoc rename serverless/pages/{ => project-settings}/spaces.mdx (100%) create mode 100644 serverless/pages/project-settings/tags.asciidoc rename serverless/pages/{ => project-settings}/tags.mdx (100%) create mode 100644 serverless/pages/project-settings/transforms.asciidoc rename serverless/pages/{ => project-settings}/transforms.mdx (100%) create mode 100644 serverless/pages/visualize-library.asciidoc create mode 100644 serverless/pages/welcome-to-serverless.asciidoc create mode 100644 serverless/partials/deploy-nlp-model-dense-vector.asciidoc create mode 100644 serverless/partials/deploy-nlp-model-elser.asciidoc create mode 100644 serverless/partials/deploy-nlp-model-widget.asciidoc create mode 100644 serverless/partials/field-mappings-dense-vector.asciidoc create mode 100644 serverless/partials/field-mappings-elser.asciidoc create mode 100644 serverless/partials/field-mappings-widget.asciidoc create mode 100644 serverless/partials/generate-embeddings-dense-vector.asciidoc create mode 100644 serverless/partials/generate-embeddings-elser.asciidoc create 
mode 100644 serverless/partials/generate-embeddings-widget.asciidoc create mode 100644 serverless/partials/hybrid-search-dense-vector.asciidoc create mode 100644 serverless/partials/hybrid-search-elser.asciidoc create mode 100644 serverless/partials/hybrid-search-widget.asciidoc create mode 100644 serverless/partials/minimum-vcus-detail.asciidoc create mode 100644 serverless/partials/search-dense-vector.asciidoc create mode 100644 serverless/partials/search-elser.asciidoc create mode 100644 serverless/partials/search-widget.asciidoc diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc new file mode 100644 index 0000000000..9755620665 --- /dev/null +++ b/serverless/index.asciidoc @@ -0,0 +1,449 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +:security-serverless: {security-docs-root}/docs/serverless +:observability-serverless: {observability-docs-root}/docs/en/serverless +:elasticsearch-serverless: {docs-content-root}/serverless/pages/elasticsearch +:general-serverless: {docs-content-root}/serverless/pages/general +:devtools-serverless: {docs-content-root}/serverless/pages/devtools + += Serverless + +[[intro]] +== Welcome to Elastic serverless + +include::{docs-content-root}/serverless/pages/welcome-to-serverless.asciidoc[leveloffset=+2] + +include::{general-serverless}/what-is-serverless.asciidoc[leveloffset=+2] + +include::{general-serverless}/sign-up.asciidoc[leveloffset=+2] + +include::{general-serverless}/manage-org.asciidoc[leveloffset=+2] +include::{general-serverless}/manage-access-to-org.asciidoc[leveloffset=+3] +include::{general-serverless}/manage-access-to-org-user-roles.asciidoc[leveloffset=+3] +include::{general-serverless}/manage-access-to-org-from-existing-account.asciidoc[leveloffset=+3] + +include::{general-serverless}/manage-your-project.asciidoc[leveloffset=+2] 
+include::{general-serverless}/manage-your-project-rest-api.asciidoc[leveloffset=+3] + +include::{general-serverless}/manage-billing.asciidoc[leveloffset=+2] +include::{general-serverless}/manage-billing-check-subscription.asciidoc[leveloffset=+3] +include::{general-serverless}/manage-billing-monitor-usage.asciidoc[leveloffset=+3] +include::{general-serverless}/manage-billing-history.asciidoc[leveloffset=+3] +include::{general-serverless}/manage-billing-pricing-model.asciidoc[leveloffset=+3] +include::{general-serverless}/manage-billing-stop-project.asciidoc[leveloffset=+3] + +include::{general-serverless}/service-status.asciidoc[leveloffset=+2] + +include::{general-serverless}/user-profile.asciidoc[leveloffset=+2] + +include::{general-serverless}/cloud-regions.asciidoc[leveloffset=+2] + +[[what-is-elasticsearch-serverless]] +== Elasticsearch + +include::{elasticsearch-serverless}/what-is-elasticsearch-serverless.asciidoc[leveloffset=+2] + +include::{elasticsearch-serverless}/pricing.asciidoc[leveloffset=+2] + +include::{elasticsearch-serverless}/get-started.asciidoc[leveloffset=+2] + +include::{elasticsearch-serverless}/clients.asciidoc[leveloffset=+2] +include::{elasticsearch-serverless}/clients-go-getting-started.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/clients-java-getting-started.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/clients-dot-net-getting-started.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/clients-nodejs-getting-started.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/clients-php-getting-started.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/clients-python-getting-started.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/clients-ruby-getting-started.asciidoc[leveloffset=+3] + +include::{elasticsearch-serverless}/apis-http-apis.asciidoc[leveloffset=+2] +include::{elasticsearch-serverless}/apis-elasticsearch-conventions.asciidoc[leveloffset=+3] 
+include::{elasticsearch-serverless}/apis-kibana-conventions.asciidoc[leveloffset=+3] + +include::{elasticsearch-serverless}/elasticsearch-developer-tools.asciidoc[leveloffset=+2] + +include::{elasticsearch-serverless}/ingest-your-data.asciidoc[leveloffset=+2] +include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/ingest-your-data-upload-file.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+3] + +include::{elasticsearch-serverless}/search-your-data.asciidoc[leveloffset=+2] +include::{elasticsearch-serverless}/search-your-data-the-search-api.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/search-with-synonyms.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/knn-search.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/search-your-data-semantic-search.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/search-your-data-semantic-search-elser.asciidoc[leveloffset=+4] + +include::{elasticsearch-serverless}/explore-your-data.asciidoc[leveloffset=+2] +include::{elasticsearch-serverless}/explore-your-data-the-aggregations-api.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/explore-your-data-discover-your-data.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/explore-your-data-visualize-your-data.asciidoc[leveloffset=+3] +include::{elasticsearch-serverless}/explore-your-data-alerting.asciidoc[leveloffset=+3] + +include::{elasticsearch-serverless}/search-playground.asciidoc[leveloffset=+2] + +include::{elasticsearch-serverless}/serverless-differences.asciidoc[leveloffset=+2] + 
+include::{elasticsearch-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] + +[[what-is-observability-serverless]] +== Elastic Observability + +Hello world + +include::{observability-serverless}/observability-overview.asciidoc[leveloffset=+2] + +include::{observability-serverless}/quickstarts/overview.asciidoc[leveloffset=+2] +include::{observability-serverless}/quickstarts/monitor-hosts-with-elastic-agent.asciidoc[leveloffset=+3] +include::{observability-serverless}/quickstarts/k8s-logs-metrics.asciidoc[leveloffset=+3] + +include::{observability-serverless}/projects/billing.asciidoc[leveloffset=+2] + +include::{observability-serverless}/projects/create-an-observability-project.asciidoc[leveloffset=+2] + +include::{observability-serverless}/logging/log-monitoring.asciidoc[leveloffset=+2] +include::{observability-serverless}/logging/get-started-with-logs.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/stream-log-files.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/correlate-application-logs.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/plaintext-application-logs.asciidoc[leveloffset=+4] +include::{observability-serverless}/logging/ecs-application-logs.asciidoc[leveloffset=+4] +include::{observability-serverless}/logging/send-application-logs.asciidoc[leveloffset=+4] +include::{observability-serverless}/logging/parse-log-data.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/filter-and-aggregate-logs.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/view-and-monitor-logs.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/add-logs-service-name.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/run-log-pattern-analysis.asciidoc[leveloffset=+3] +include::{observability-serverless}/logging/troubleshoot-logs.asciidoc[leveloffset=+3] + +include::{observability-serverless}/inventory.asciidoc[leveloffset=+2] + 
+include::{observability-serverless}/apm/apm.asciidoc[leveloffset=+2] +include::{observability-serverless}/apm/apm-get-started.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-send-traces-to-elastic.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm-agents/apm-agents-elastic-apm-agents.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm-agents/apm-agents-opentelemetry.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-opentelemetry-native-support.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-collect-metrics.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-limitations.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-resource-attributes.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm-agents/apm-agents-aws-lambda-functions.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-view-and-analyze-traces.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-find-transaction-latency-and-failure-correlations.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-integrate-with-machine-learning.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-create-custom-links.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-track-deployments-with-annotations.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-query-your-data.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-filter-your-data.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-observe-lambda-functions.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-ui-overview.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-ui-services.asciidoc[leveloffset=+5] 
+include::{observability-serverless}/apm/apm-ui-traces.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-dependencies.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-service-map.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-service-overview.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-transactions.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-trace-sample-timeline.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-errors.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-metrics.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-infrastructure.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-ui-logs.asciidoc[leveloffset=+5] +include::{observability-serverless}/apm/apm-data-types.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-distributed-tracing.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-reduce-your-data-usage.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-transaction-sampling.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-compress-spans.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-stacktrace-collection.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-keep-data-secure.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-troubleshooting.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-reference.asciidoc[leveloffset=+3] +include::{observability-serverless}/apm/apm-kibana-settings.asciidoc[leveloffset=+4] +include::{observability-serverless}/apm/apm-server-api.asciidoc[leveloffset=+4] + +include::{observability-serverless}/infra-monitoring/infra-monitoring.asciidoc[leveloffset=+2] 
+include::{observability-serverless}/infra-monitoring/get-started-with-metrics.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/view-infrastructure-metrics.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/analyze-hosts.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/detect-metric-anomalies.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/configure-infra-settings.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/troubleshooting-infra.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/handle-no-results-found-message.asciidoc[leveloffset=+4] +include::{observability-serverless}/infra-monitoring/metrics-reference.asciidoc[leveloffset=+3] +include::{observability-serverless}/infra-monitoring/host-metrics.asciidoc[leveloffset=+4] +include::{observability-serverless}/infra-monitoring/container-metrics.asciidoc[leveloffset=+4] +include::{observability-serverless}/infra-monitoring/kubernetes-pod-metrics.asciidoc[leveloffset=+4] +include::{observability-serverless}/infra-monitoring/aws-metrics.asciidoc[leveloffset=+4] +include::{observability-serverless}/infra-monitoring/metrics-app-fields.asciidoc[leveloffset=+3] + +include::{observability-serverless}/synthetics/synthetics-intro.asciidoc[leveloffset=+2] +include::{observability-serverless}/synthetics/synthetics-get-started.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-get-started-project.asciidoc[leveloffset=+4] +include::{observability-serverless}/synthetics/synthetics-get-started-ui.asciidoc[leveloffset=+4] +include::{observability-serverless}/synthetics/synthetics-journeys.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-create-test.asciidoc[leveloffset=+4] +include::{observability-serverless}/synthetics/synthetics-monitor-use.asciidoc[leveloffset=+4] 
+include::{observability-serverless}/synthetics/synthetics-recorder.asciidoc[leveloffset=+4] +include::{observability-serverless}/synthetics/synthetics-lightweight.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-manage-monitors.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-params-secrets.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-analyze.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-private-location.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-command-reference.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-configuration.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-settings.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-feature-roles.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-manage-retention.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-scale-and-architect.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-security-encryption.asciidoc[leveloffset=+3] +include::{observability-serverless}/synthetics/synthetics-troubleshooting.asciidoc[leveloffset=+3] + +include::{observability-serverless}/dashboards/dashboards-and-visualizations.asciidoc[leveloffset=+2] + +include::{observability-serverless}/alerting/alerting.asciidoc[leveloffset=+2] +include::{observability-serverless}/alerting/create-manage-rules.asciidoc[leveloffset=+3] +include::{observability-serverless}/alerting/aiops-generate-anomaly-alerts.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-anomaly-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-custom-threshold-alert-rule.asciidoc[leveloffset=+4] 
+include::{observability-serverless}/alerting/create-elasticsearch-query-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-error-count-threshold-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-failed-transaction-rate-threshold-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-inventory-threshold-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-latency-threshold-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/create-slo-burn-rate-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/aggregation-options.asciidoc[leveloffset=+3] +include::{observability-serverless}/alerting/rate-aggregation.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/view-alerts.asciidoc[leveloffset=+3] +include::{observability-serverless}/alerting/triage-slo-burn-rate-breaches.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/triage-threshold-breaches.asciidoc[leveloffset=+4] + +include::{observability-serverless}/slos/slos.asciidoc[leveloffset=+2] +include::{observability-serverless}/slos/create-an-slo.asciidoc[leveloffset=+3] + +include::{observability-serverless}/cases/cases.asciidoc[leveloffset=+2] +include::{observability-serverless}/cases/create-manage-cases.asciidoc[leveloffset=+3] +include::{observability-serverless}/cases/manage-cases-settings.asciidoc[leveloffset=+3] + +include::{observability-serverless}/aiops/aiops.asciidoc[leveloffset=+2] +include::{observability-serverless}/aiops/aiops-detect-anomalies.asciidoc[leveloffset=+3] +include::{observability-serverless}/aiops/aiops-tune-anomaly-detection-job.asciidoc[leveloffset=+4] +include::{observability-serverless}/aiops/aiops-forecast-anomaly.asciidoc[leveloffset=+4] +include::{observability-serverless}/aiops/aiops-analyze-spikes.asciidoc[leveloffset=+3] 
+include::{observability-serverless}/aiops/aiops-detect-change-points.asciidoc[leveloffset=+3] + +include::{observability-serverless}/monitor-datasets.asciidoc[leveloffset=+2] + +include::{observability-serverless}/ai-assistant/ai-assistant.asciidoc[leveloffset=+2] + +include::{observability-serverless}/elastic-entity-model.asciidoc[leveloffset=+2] + +include::{observability-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] + +[[what-is-security-serverless]] +== Elastic Security + +Hello world + +include::{security-serverless}/security-overview.asciidoc[leveloffset=+2] + +include::{security-serverless}/billing.asciidoc[leveloffset=+2] + +include::{security-serverless}/projects-create/create-project.asciidoc[leveloffset=+2] + +include::{security-serverless}/sec-requirements.asciidoc[leveloffset=+2] + +include::{security-serverless}/security-ui.asciidoc[leveloffset=+2] + +include::{security-serverless}/AI-for-security/ai-for-security-landing-pg.asciidoc[leveloffset=+2] +include::{security-serverless}/AI-for-security/ai-assistant.asciidoc[leveloffset=+3] +include::{security-serverless}/AI-for-security/attack-discovery.asciidoc[leveloffset=+3] +include::{security-serverless}/AI-for-security/llm-connector-guides.asciidoc[leveloffset=+3] +include::{security-serverless}/AI-for-security/llm-performance-matrix.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/connect-to-azure-openai.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/connect-to-bedrock.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/connect-to-openai.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/connect-to-vertex.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/connect-to-byo-llm.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/ai-use-cases.asciidoc[leveloffset=+3] 
+include::{security-serverless}/AI-for-security/usecase-attack-disc-ai-assistant-incident-reporting.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/ai-assistant-alert-triage.asciidoc[leveloffset=+4] +include::{security-serverless}/AI-for-security/ai-assistant-esql-queries.asciidoc[leveloffset=+4] + +include::{security-serverless}/ingest/ingest-data.asciidoc[leveloffset=+2] +include::{security-serverless}/ingest/threat-intelligence.asciidoc[leveloffset=+3] +include::{security-serverless}/ingest/auto-import.asciidoc[leveloffset=+3] + +include::{security-serverless}/edr-install-config/endpoint-protection-intro.asciidoc[leveloffset=+2] +include::{security-serverless}/edr-install-config/deploy-endpoint-reqs.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-install-config/install-elastic-defend.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-install-config/deploy-endpoint-macos-cat-mont.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/deploy-endpoint-macos-ven.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/deploy-with-mdm.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/agent-tamper-protection.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/defend-feature-privs.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-install-config/configure-endpoint-integration-policy.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-install-config/artifact-control.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/endpoint-diagnostic-data.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/self-healing-rollback.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/linux-file-monitoring.asciidoc[leveloffset=+4] +include::{security-serverless}/edr-install-config/endpoint-data-volume.asciidoc[leveloffset=+4] 
+include::{security-serverless}/edr-install-config/uninstall-agent.asciidoc[leveloffset=+3] + +include::{security-serverless}/edr-manage/manage-endpoint-protection.asciidoc[leveloffset=+2] +include::{security-serverless}/edr-manage/endpoints-page.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/policies-page-ov.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/trusted-apps-ov.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/event-filters.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/host-isolation-exceptions.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/blocklist.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/optimize-edr.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/endpoint-event-capture.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/allowlist-endpoint-3rd-party-av.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/endpoint-self-protection.asciidoc[leveloffset=+3] +include::{security-serverless}/edr-manage/endpoint-command-ref.asciidoc[leveloffset=+3] + +include::{security-serverless}/endpoint-response-actions/response-actions.asciidoc[leveloffset=+2] +include::{security-serverless}/endpoint-response-actions/automated-response-actions.asciidoc[leveloffset=+3] +include::{security-serverless}/endpoint-response-actions/host-isolation-ov.asciidoc[leveloffset=+3] +include::{security-serverless}/endpoint-response-actions/response-actions-history.asciidoc[leveloffset=+3] +include::{security-serverless}/endpoint-response-actions/third-party-actions.asciidoc[leveloffset=+3] +include::{security-serverless}/endpoint-response-actions/response-actions-config.asciidoc[leveloffset=+3] + +include::{security-serverless}/cloud-native-security/cloud-native-security-overview.asciidoc[leveloffset=+2] +// 
include::{security-serverless}/cloud-native-security/security-posture-management.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/enable-cloudsec.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/cspm.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/cspm-get-started.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/cspm-get-started-gcp.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/cspm-get-started-azure.asciidoc[leveloffset=+4] +// include::{security-serverless}/cloud-native-security/cspm-findings-page.asciidoc[leveloffset=+4] +// include::{security-serverless}/cloud-native-security/benchmark-rules.asciidoc[leveloffset=+4] +// include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+4] +// include::{security-serverless}/cloud-native-security/cspm-security-posture-faq.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/kspm.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/get-started-with-kspm.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/cspm-findings-page.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/benchmark-rules.asciidoc[leveloffset=+4] +// include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/security-posture-faq.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/vuln-management-overview.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/vuln-management-get-started.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/vuln-management-findings.asciidoc[leveloffset=+4] +// include::{security-serverless}/dashboards/vuln-management-dashboard-dash.asciidoc[leveloffset=+4] 
+include::{security-serverless}/cloud-native-security/vuln-management-faq.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/d4c-overview.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/d4c-get-started.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/d4c-policy-guide.asciidoc[leveloffset=+4] +// include::{security-serverless}/dashboards/kubernetes-dashboard-dash.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/cloud-workload-protection.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/environment-variable-capture.asciidoc[leveloffset=+4] + +include::{security-serverless}/explore/explore-your-data.asciidoc[leveloffset=+2] +include::{security-serverless}/explore/hosts-overview.asciidoc[leveloffset=+3] +include::{security-serverless}/explore/network-page-overview.asciidoc[leveloffset=+3] +include::{security-serverless}/explore/conf-map-ui.asciidoc[leveloffset=+4] +include::{security-serverless}/explore/users-page.asciidoc[leveloffset=+3] +include::{security-serverless}/explore/data-views-in-sec.asciidoc[leveloffset=+3] +include::{security-serverless}/explore/runtime-fields.asciidoc[leveloffset=+3] +include::{security-serverless}/explore/siem-field-reference.asciidoc[leveloffset=+3] + +include::{security-serverless}/dashboards/dashboards-overview.asciidoc[leveloffset=+2] +include::{security-serverless}/dashboards/overview-dashboard.asciidoc[leveloffset=+3] +include::{security-serverless}/dashboards/detection-response-dashboard.asciidoc[leveloffset=+3] +include::{security-serverless}/dashboards/kubernetes-dashboard-dash.asciidoc[leveloffset=+3] +include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+3] +include::{security-serverless}/dashboards/detection-entity-dashboard.asciidoc[leveloffset=+3] +include::{security-serverless}/dashboards/data-quality-dash.asciidoc[leveloffset=+3] 
+include::{security-serverless}/dashboards/vuln-management-dashboard-dash.asciidoc[leveloffset=+3] +include::{security-serverless}/dashboards/rule-monitoring-dashboard.asciidoc[leveloffset=+3] + +include::{security-serverless}/rules/detection-engine-overview.asciidoc[leveloffset=+2] +include::{security-serverless}/rules/detections-permissions-section.asciidoc[leveloffset=+3] + +include::{security-serverless}/rules/about-rules.asciidoc[leveloffset=+2] +include::{security-serverless}/rules/rules-ui-create.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/interactive-investigation-guides.asciidoc[leveloffset=+4] +include::{security-serverless}/rules/building-block-rule.asciidoc[leveloffset=+4] +include::{security-serverless}/rules/prebuilt-rules/prebuilt-rules-management.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/rules-ui-management.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/alerts-ui-monitor.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/detections-ui-exceptions.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/value-lists-exceptions.asciidoc[leveloffset=+4] +include::{security-serverless}/rules/add-exceptions.asciidoc[leveloffset=+4] +include::{security-serverless}/rules/shared-exception-lists.asciidoc[leveloffset=+4] +include::{security-serverless}/rules/rules-coverage.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/tuning-detection-signals.asciidoc[leveloffset=+3] +include::{security-serverless}/rules/prebuilt-rules/prebuilt-rules.asciidoc[leveloffset=+3] + +include::{security-serverless}/alerts/alerts-ui-manage.asciidoc[leveloffset=+2] +include::{security-serverless}/alerts/visualize-alerts.asciidoc[leveloffset=+3] +include::{security-serverless}/alerts/view-alert-details.asciidoc[leveloffset=+3] +include::{security-serverless}/alerts/signals-to-cases.asciidoc[leveloffset=+3] +include::{security-serverless}/alerts/alert-suppression.asciidoc[leveloffset=+3] 
+include::{security-serverless}/alerts/reduce-notifications-alerts.asciidoc[leveloffset=+3] +include::{security-serverless}/alerts/query-alert-indices.asciidoc[leveloffset=+3] +include::{security-serverless}/alerts/alert-schema.asciidoc[leveloffset=+3] + +include::{security-serverless}/advanced-entity-analytics/advanced-entity-analytics-overview.asciidoc[leveloffset=+2] +include::{security-serverless}/advanced-entity-analytics/entity-risk-scoring.asciidoc[leveloffset=+3] +include::{security-serverless}/advanced-entity-analytics/ers-req.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/asset-criticality.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/turn-on-risk-engine.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/analyze-risk-score-data.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/advanced-behavioral-detections.asciidoc[leveloffset=+3] +include::{security-serverless}/advanced-entity-analytics/ml-requirements.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/machine-learning.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/tuning-anomaly-results.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/behavioral-detection-use-cases.asciidoc[leveloffset=+4] +include::{security-serverless}/advanced-entity-analytics/prebuilt-ml-jobs.asciidoc[leveloffset=+4] + +include::{security-serverless}/investigate/investigate-events.asciidoc[leveloffset=+2] +include::{security-serverless}/investigate/timelines-ui.asciidoc[leveloffset=+3] +include::{security-serverless}/investigate/timeline-templates-ui.asciidoc[leveloffset=+4] +include::{security-serverless}/investigate/timeline-object-schema.asciidoc[leveloffset=+4] +include::{security-serverless}/alerts/visual-event-analyzer.asciidoc[leveloffset=+3] 
+include::{security-serverless}/cloud-native-security/session-view.asciidoc[leveloffset=+3] +include::{security-serverless}/osquery/use-osquery.asciidoc[leveloffset=+3] +include::{security-serverless}/osquery/osquery-response-action.asciidoc[leveloffset=+4] +include::{security-serverless}/osquery/invest-guide-run-osquery.asciidoc[leveloffset=+4] +include::{security-serverless}/osquery/alerts-run-osquery.asciidoc[leveloffset=+4] +include::{security-serverless}/osquery/view-osquery-results.asciidoc[leveloffset=+4] +include::{security-serverless}/osquery/osquery-placeholder-fields.asciidoc[leveloffset=+4] +include::{security-serverless}/investigate/indicators-of-compromise.asciidoc[leveloffset=+3] +include::{security-serverless}/investigate/cases-overview.asciidoc[leveloffset=+3] +include::{security-serverless}/investigate/case-permissions.asciidoc[leveloffset=+4] +include::{security-serverless}/investigate/cases-open-manage.asciidoc[leveloffset=+4] +include::{security-serverless}/investigate/cases-settings.asciidoc[leveloffset=+4] + +include::{security-serverless}/assets/asset-management.asciidoc[leveloffset=+2] + +include::{security-serverless}/settings/manage-settings.asciidoc[leveloffset=+2] +include::{security-serverless}/settings/project-settings.asciidoc[leveloffset=+3] +include::{security-serverless}/settings/advanced-settings.asciidoc[leveloffset=+3] + +include::{security-serverless}/troubleshooting/troubleshooting-intro.asciidoc[leveloffset=+2] +include::{security-serverless}/troubleshooting/ts-detection-rules.asciidoc[leveloffset=+3] +include::{security-serverless}/troubleshooting/troubleshoot-endpoints.asciidoc[leveloffset=+3] + +include::{security-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] + +== Dev tools + +Hello world + +include::{devtools-serverless}/run-api-requests-in-the-console.asciidoc[leveloffset=+2] + +include::{devtools-serverless}/profile-queries-and-aggregations.asciidoc[leveloffset=+2] + 
+include::{devtools-serverless}/debug-grok-expressions.asciidoc[leveloffset=+2] + +include::{devtools-serverless}/debug-painless-scripts.asciidoc[leveloffset=+2] + +include::{devtools-serverless}/developer-tools-troubleshooting.asciidoc[leveloffset=+2] + +== Project and management settings + +Hello world \ No newline at end of file diff --git a/serverless/pages/devtools/debug-grok-expressions.asciidoc b/serverless/pages/devtools/debug-grok-expressions.asciidoc new file mode 100644 index 0000000000..9c63a6f20c --- /dev/null +++ b/serverless/pages/devtools/debug-grok-expressions.asciidoc @@ -0,0 +1,114 @@ +[[debug-grok-expressions]] += Grok Debugger + +:description: Build and debug grok patterns before you use them in your data processing pipelines. +:keywords: serverless, dev tools, how-to + +preview:[] + +This content applies to: + +You can build and debug grok patterns in the **Grok Debugger** before you use them in your data processing pipelines. +Grok is a pattern-matching syntax that you can use to parse and structure arbitrary text. +Grok is good for parsing syslog, apache, and other webserver logs, mysql logs, and in general, +any log format written for human consumption. + +Grok patterns are supported in {es} {ref}/runtime.html[runtime fields], +the {es} {ref}/grok-processor.html[grok ingest processor], +and the {ls} {logstash-ref}/plugins-filters-grok.html[grok filter]. +For syntax, see {ref}/grok.html[Grokking grok]. + +Elastic ships with more than 120 reusable grok patterns. +For a complete list of patterns, see +https://github.com/elastic/elasticsearch/tree/master/libs/grok/src/main/resources/patterns[{es} +grok patterns] +and https://github.com/logstash-plugins/logstash-patterns-core/tree/master/patterns[{ls} +grok patterns]. + +// TODO: Figure out where to link to for grok patterns. Looks like the dir structure has changed. 
+ +Because {es} and {ls} share the same grok implementation and pattern libraries, +any grok pattern that you create in the **Grok Debugger** will work in both {es} and {ls}. + +[discrete] +[[grokdebugger-getting-started]] +== Get started + +This example walks you through using the **Grok Debugger**. + +.Required roles +[NOTE] +==== +The **Admin** role is required to use the Grok Debugger. +For more information, refer to https://www.elastic.co/docs/current/serverless/general/assign-user-roles[] +==== + +. From the main menu, click **Developer Tools**, then click **Grok Debugger**. +. In **Sample Data**, enter a message that is representative of the data you want to parse. +For example: ++ +[source,ruby] +---- +55.3.244.1 GET /index.html 15824 0.043 +---- +. In **Grok Pattern**, enter the grok pattern that you want to apply to the data. ++ +To parse the log line in this example, use: ++ +[source,ruby] +---- +%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration} +---- +. Click **Simulate**. ++ +You'll see the simulated event that results from applying the grok +pattern. ++ +[role="screenshot"] +image::images/grok-debugger-overview.png["Grok Debugger"] + +[discrete] +[[grokdebugger-custom-patterns]] +== Test custom patterns + +If the default grok pattern dictionary doesn't contain the patterns you need, +you can define, test, and debug custom patterns using the **Grok Debugger**. + +Custom patterns that you enter in the **Grok Debugger** are not saved. Custom patterns +are only available for the current debugging session and have no side effects. + +Follow this example to define a custom pattern. + +. In **Sample Data**, enter the following sample message: ++ +[source,ruby] +---- +Jan 1 06:25:43 mailserver14 postfix/cleanup[21403]: BEF25A72965: message-id=<20130101142543.5828399CCAF@mailserver14.example.com> +---- +. 
Enter this grok pattern: ++ +[source,ruby] +---- +%{SYSLOGBASE} %{POSTFIX_QUEUEID:queue_id}: %{MSG:syslog_message} +---- ++ +Notice that the grok pattern references custom patterns called `POSTFIX_QUEUEID` and `MSG`. +. Expand **Custom Patterns** and enter pattern definitions for the custom patterns that you want to use in the grok expression. +You must specify each pattern definition on its own line. ++ +For this example, you must specify pattern definitions +for `POSTFIX_QUEUEID` and `MSG`: ++ +[source,ruby] +---- +POSTFIX_QUEUEID [0-9A-F]{10,11} +MSG message-id=<%{GREEDYDATA}> +---- +. Click **Simulate**. ++ +You'll see the simulated output event that results from applying the grok pattern that contains the custom pattern: ++ +[role="screenshot"] +image::images/grok-debugger-custom-pattern.png["Debugging a custom pattern"] ++ +If an error occurs, you can continue iterating over the custom pattern until the output matches your expected event. diff --git a/serverless/pages/debug-grok-expressions.mdx b/serverless/pages/devtools/debug-grok-expressions.mdx similarity index 100% rename from serverless/pages/debug-grok-expressions.mdx rename to serverless/pages/devtools/debug-grok-expressions.mdx diff --git a/serverless/pages/devtools/debug-painless-scripts.asciidoc b/serverless/pages/devtools/debug-painless-scripts.asciidoc new file mode 100644 index 0000000000..78a5d601c0 --- /dev/null +++ b/serverless/pages/devtools/debug-painless-scripts.asciidoc @@ -0,0 +1,19 @@ +[[debug-painless-scripts]] += Painless Lab + +:description: Use our interactive code editor to test and debug Painless scripts in real-time. +:keywords: serverless, dev tools, how-to + +preview:[] + +This content applies to: + +beta::[] + +The **Painless Lab** is an interactive code editor that lets you test and debug {ref}/modules-scripting-painless.html[Painless scripts] in real-time. +You can use Painless to safely write inline and stored scripts anywhere scripts are supported. 
+ +To get started, open the main menu, click **Developer Tools**, and then click **Painless Lab**. + +[role="screenshot"] +image::images/painless-lab.png[Painless Lab] diff --git a/serverless/pages/debug-painless-scripts.mdx b/serverless/pages/devtools/debug-painless-scripts.mdx similarity index 100% rename from serverless/pages/debug-painless-scripts.mdx rename to serverless/pages/devtools/debug-painless-scripts.mdx diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc new file mode 100644 index 0000000000..457fe0229d --- /dev/null +++ b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc @@ -0,0 +1,299 @@ +[[dev-tools-troubleshooting]] += Troubleshooting + +:description: Troubleshoot searches. +:keywords: serverless, troubleshooting + +preview:[] + +When you query your data, Elasticsearch may return an error, no search results, +or results in an unexpected order. This guide describes how to troubleshoot +searches. + +[discrete] +[[dev-tools-troubleshooting-ensure-the-data-stream-index-or-alias-exists]] +== Ensure the data stream, index, or alias exists + +Elasticsearch returns an `index_not_found_exception` when the data stream, index +or alias you try to query does not exist. This can happen when you misspell the +name or when the data has been indexed to a different data stream or index. 
+
+Use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-exists[**Exists API**] to check whether
+a data stream, index, or alias exists:
+
+[source,js]
+----
+HEAD my-index
+----
+
+Use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-get[**Get index API**]
+to list all indices and their aliases:
+
+[source,js]
+----
+GET /_all?filter_path=*.aliases
+----
+
+Instead of an error, it is possible to retrieve partial search results if some
+of the indices you're querying are unavailable.
+Set `ignore_unavailable` to `true`:
+
+[source,js]
+----
+GET /my-alias/_search?ignore_unavailable=true
+----
+
+[discrete]
+[[dev-tools-troubleshooting-ensure-the-data-stream-or-index-contains-data]]
+== Ensure the data stream or index contains data
+
+When a search request returns no hits, the data stream or index may contain no
+data.
+This can happen when there is a data ingestion issue.
+For example, the data may have been indexed to a data stream or index with
+another name.
+
+Use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-count-1[**Count API**]
+to retrieve the number of documents in a data
+stream or index.
+Check that `count` in the response is not 0.
+
+[source,js]
+----
+GET /my-index-000001/_count
+----
+
+[NOTE]
+====
+If you aren't getting search results in the UI, check that you have selected the
+correct data view and a valid time range. Also, ensure the data view has been
+configured with the correct time field.
+====
+
+[discrete]
+[[dev-tools-troubleshooting-check-that-the-field-exists-and-its-capabilities]]
+== Check that the field exists and its capabilities
+
+Querying a field that does not exist will not return any results.
+Use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-field-caps[**Field capabilities API**] +to check whether a field exists: + +[source,js] +---- +GET /my-index-000001/_field_caps?fields=my-field +---- + +If the field does not exist, check the data ingestion process. +The field may have a different name. + +If the field exists, the request will return the field's type and whether it is +searchable and aggregatable. + +[source,console-response] +---- +{ + "indices": [ + "my-index-000001" + ], + "fields": { + "my-field": { + "keyword": { + "type": "keyword", <1> + "metadata_field": false, + "searchable": true, <2> + "aggregatable": true <3> + } + } + } +} +---- + +<1> The field is of type `keyword` in this index. + +<2> The field is searchable in this index. + +<3> The field is aggregatable in this index. + +[discrete] +[[dev-tools-troubleshooting-check-the-fields-mappings]] +== Check the field's mappings + +A field's capabilities are determined by its {ref}/mapping.html[mapping]. +To retrieve the mapping, use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-get-mapping[**Get mapping API**]: + +[source,js] +---- +GET /my-index-000001/_mappings +---- + +If you query a `text` field, pay attention to the analyzer that may have been +configured. +You can use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-analyze[**Analyze API**] +to check how a field's analyzer processes values and query terms: + +[source,js] +---- +GET /my-index-000001/_analyze +{ + "field": "my-field", + "text": "this is a test" +} +---- + +To change the mapping of an existing field use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-mapping-1[**Update mapping API**]. 
+ +[discrete] +[[dev-tools-troubleshooting-check-the-fields-values]] +== Check the field's values + +Use the `exists` query to check whether there are +documents that return a value for a field. +Check that `count` in the response is +not 0. + +[source,js] +---- +GET /my-index-000001/_count +{ + "query": { + "exists": { + "field": "my-field" + } + } +} +---- + +If the field is aggregatable, you can use https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-aggregations[aggregations] +to check the field's values. For `keyword` fields, you can use a `terms` +aggregation to retrieve the field's most common values: + +[source,js] +---- +GET /my-index-000001/_search?filter_path=aggregations +{ + "size": 0, + "aggs": { + "top_values": { + "terms": { + "field": "my-field", + "size": 10 + } + } + } +} +---- + +For numeric fields, you can use {ref}/search-aggregations-metrics-stats-aggregation.html[stats aggregation] to get an idea of the field's value distribution: + +[source,js] +---- +GET /my-index-000001/_search?filter_path=aggregations +{ + "aggs": { + "my-num-field-stats": { + "stats": { + "field": "my-num-field" + } + } + } +} +---- + +If the field does not return any values, check the data ingestion process. +The field may have a different name. + +[discrete] +[[dev-tools-troubleshooting-check-the-latest-value]] +== Check the latest value + +For time-series data, confirm there is non-filtered data within the attempted +time range. +For example, if you are trying to query the latest data for the +`@timestamp` field, run the following to see if the max `@timestamp` falls +within the attempted range: + +[source,js] +---- +GET /my-index-000001/_search?sort=@timestamp:desc&size=1 +---- + +[discrete] +[[dev-tools-troubleshooting-validate-explain-and-profile-queries]] +== Validate, explain, and profile queries + +When a query returns unexpected results, Elasticsearch offers several tools to +investigate why. 
+ +The https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-validate-query[**Validate API**] +enables you to validate a query. +Use the `rewrite` parameter to return the Lucene query an Elasticsearch query is +rewritten into: + +[source,js] +---- +GET /my-index-000001/_validate/query?rewrite=true +{ + "query": { + "match": { + "user.id": { + "query": "kimchy", + "fuzziness": "auto" + } + } + } +} +---- + +Use the {ref}/search-explain.html[**Explain API**] to find out why a +specific document matches or doesn’t match a query: + +[source,js] +---- +GET /my-index-000001/_explain/0 +{ + "query" : { + "match" : { "message" : "elasticsearch" } + } +} +---- + +The {ref}/search-profile.html[**Profile API**] +provides detailed timing information about a search request. +For a visual representation of the results, use the +<>. + +[NOTE] +==== +To troubleshoot queries, select **Inspect** in the toolbar. +Next, select **Request**. +You can now copy the query sent to {es} for further analysis in Console. +==== + +[discrete] +[[dev-tools-troubleshooting-check-index-settings]] +== Check index settings + +Index settings +can influence search results. +For example, the `index.query.default_field` setting, which determines the field +that is queried when a query specifies no explicit field. +Use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-get-settings[**Get index settings API**] +to retrieve the settings for an index: + +[source,bash] +---- +GET /my-index-000001/_settings +---- + +You can update dynamic index settings with the +https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-settings[**Update index settings API**]. +Changing dynamic index settings for a data stream + requires changing the index template used by the data stream. + +For static settings, you need to create a new index with the correct settings. 
+Next, you can reindex the data into that index. + +//// +/*For data streams, refer to Change a static index setting +for a data stream */ +//// diff --git a/serverless/pages/developer-tools-troubleshooting.mdx b/serverless/pages/devtools/developer-tools-troubleshooting.mdx similarity index 100% rename from serverless/pages/developer-tools-troubleshooting.mdx rename to serverless/pages/devtools/developer-tools-troubleshooting.mdx diff --git a/serverless/pages/devtools/general-developer-tools.asciidoc b/serverless/pages/devtools/general-developer-tools.asciidoc new file mode 100644 index 0000000000..04bc980852 --- /dev/null +++ b/serverless/pages/devtools/general-developer-tools.asciidoc @@ -0,0 +1,27 @@ +[[-serverless-devtools-developer-tools]] += Developer tools + +:description: Use our developer tools to interact with your data. +:keywords: serverless, dev tools, overview + +preview:[] + +|=== +| Feature | Description | Available in + +| https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[Console] +| Interact with Elastic REST APIs. +| + +| https://www.elastic.co/docs/current/serverless/devtools/profile-queries-and-aggregations[{searchprofiler}] +| Inspect and analyze your search queries. +| + +| https://www.elastic.co/docs/current/serverless/devtools/debug-grok-expressions[Grok Debugger] +| Build and debug grok patterns before you use them in your data processing pipelines. +| + +| https://www.elastic.co/docs/current/serverless/devtools/debug-painless-scripts[Painless Lab] +| Use an interactive code editor to test and debug Painless scripts in real time. 
+| +|=== diff --git a/serverless/pages/general-developer-tools.mdx b/serverless/pages/devtools/general-developer-tools.mdx similarity index 100% rename from serverless/pages/general-developer-tools.mdx rename to serverless/pages/devtools/general-developer-tools.mdx diff --git a/serverless/pages/devtools/index.asciidoc b/serverless/pages/devtools/index.asciidoc new file mode 100644 index 0000000000..a40fcfc485 --- /dev/null +++ b/serverless/pages/devtools/index.asciidoc @@ -0,0 +1,16 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + += Dev tools + +include::./run-api-requests-in-the-console.asciidoc[leveloffset=+1] + +include::./profile-queries-and-aggregations.asciidoc[leveloffset=+1] + +include::./debug-grok-expressions.asciidoc[leveloffset=+1] + +include::./debug-painless-scripts.asciidoc[leveloffset=+1] + +include::./developer-tools-troubleshooting.asciidoc[leveloffset=+1] diff --git a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc new file mode 100644 index 0000000000..c3f447308a --- /dev/null +++ b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc @@ -0,0 +1,333 @@ +[[profile-queries-and-aggregations]] += Search Profiler + +:description: Diagnose and debug poorly performing search queries. +:keywords: serverless, dev tools, how-to + +preview:[] + +This content applies to: + +//// +/* TODO: The following content was copied verbatim from the ES docs on Oct 5, 2023. It should be included through +transclusion. */ +//// + +{es} has a powerful {ref}/search-profile.html[Profile API] for debugging search queries. +It provides detailed timing information about the execution of individual components in a search request. +This allows users to optimize queries for better performance. + +However, Profile API responses can be hard to read, especially for complex queries. 
+**{searchprofiler}** helps you visualize these responses in a graphical interface.
+
+[discrete]
+[[search-profiler-getting-started]]
+== Get started
+
+Access **{searchprofiler}** under **Dev Tools**.
+
+**{searchprofiler}** displays the names of the indices searched and how long it took for the query to complete.
+Test it out by replacing the default `match_all` query with the query you want to profile, and then select **Profile**.
+
+The following example shows the results of profiling the `match_all` query.
+If you take a closer look at the information for the `.security-7` sample index, the
+**Cumulative time** field shows you that the query took 0.028ms to execute.
+
+[role="screenshot"]
+image::images/profiler-overview.png[{searchprofiler} match_all example]
+
+////
+/*
+
+The cumulative time metric is the sum of individual shard times.
+It is not necessarily the actual time it took for the query to return (wall clock time).
+Because shards might be processed in parallel on multiple nodes, the wall clock time can
+be significantly less than the cumulative time.
+However, if shards are colocated on the same node and executed serially, the wall clock time is closer to the cumulative time.
+
+While the cumulative time metric is useful for comparing the performance of your
+indices and shards, it doesn't necessarily represent the actual physical query times.
+
+ */
+////
+
+// Commenting out for moment, given shards and nodes are obfuscated concepts in serverless
+
+To see more profiling information, select **View details**.
+You'll find details about query components and the timing
+breakdown of low-level methods.
+For more information, refer to {ref}/search-profile.html#profiling-queries[Profiling queries] in the {es} documentation.
+
+[discrete]
+[[profile-queries-and-aggregations-filter-for-an-index-or-type]]
+== Filter for an index or type
+
+By default, all queries executed by the **{searchprofiler}** are sent
+to `GET /_search`.
+It searches across your entire cluster (all indices, all types). + +To query a specific index or type, you can use the **Index** filter. + +In the following example, the query is executed against the indices `.security-7` and `kibana_sample_data_ecommerce`. +This is equivalent to making a request to `GET /.security-7,kibana_sample_data_ecommerce/_search`. + +[role="screenshot"] +image::images/profiler-filter.png["Filtering by index and type"] + +[discrete] +[[profile-complicated-query]] +== Profile a more complicated query + +To understand how the query trees are displayed inside the **{searchprofiler}**, +take a look at a more complicated query. + +. Index the following data using **Console**: ++ +[source,js] +---- +POST test/_bulk +{"index":{}} +{"name":"aaron","age":23,"hair":"brown"} +{"index":{}} +{"name":"sue","age":19,"hair":"red"} +{"index":{}} +{"name":"sally","age":19,"hair":"blonde"} +{"index":{}} +{"name":"george","age":19,"hair":"blonde"} +{"index":{}} +{"name":"fred","age":69,"hair":"blonde"} +---- ++ +// CONSOLE +. From the **{searchprofiler}**, enter **test** in the **Index** field to restrict profiled +queries to the `test` index. +. Replace the default `match_all` query in the query editor with a query that has two sub-query +components and includes a simple aggregation: ++ +[source,js] +---- +{ + "query": { + "bool": { + "should": [ + { + "match": { + "name": "fred" + } + }, + { + "terms": { + "name": [ + "sue", + "sally" + ] + } + } + ] + } + }, + "aggs": { + "stats": { + "stats": { + "field": "price" + } + } + } +} +---- ++ +// NOTCONSOLE +. Select **Profile** to profile the query and visualize the results. ++ +[role="screenshot"] +image::images/profiler-gs8.png[Profiling the more complicated query] ++ +** The top `BooleanQuery` component corresponds to the `bool` in the query. +** The second `BooleanQuery` corresponds to the `terms` query, which is internally +converted to a `Boolean` of `should` clauses. 
It has two child queries that correspond
+to "sally" and "sue" from the `terms` query.
+** The `TermQuery` that's labeled with "name:fred" corresponds to `match: fred` in the query.
++
+In the time columns, the **Self time** and **Total time** are no longer
+identical on all rows:
++
+** **Self time** represents how long the query component took to execute.
+** **Total time** is the time a query component and all its children took to execute.
++
+Therefore, queries like the Boolean queries often have a larger total time than self time.
+. Select **Aggregation Profile** to view aggregation profiling statistics.
++
+This query includes a `stats` agg on the `"price"` field.
+The **Aggregation Profile** tab is only enabled when the query being profiled contains an aggregation.
+. Select **View details** to view the timing breakdown.
++
+[role="screenshot"]
+image::images/profiler-gs10.png["Drilling into the first shard's details"]
++
+For more information about how the **{searchprofiler}** works, how timings are calculated, and
+how to interpret various results, refer to
+{ref}/search-profile.html#profiling-queries[Profiling queries] in the {es} documentation.
+
+[discrete]
+[[profiler-render-JSON]]
+== Render pre-captured profiler JSON
+
+Sometimes you might want to investigate performance problems that are temporal in nature.
+For example, a query might only be slow at certain time of day when many customers are using your system.
+You can set up a process to automatically profile slow queries when they occur and then
+save those profile responses for later analysis.
+
+The **{searchprofiler}** supports this workflow by allowing you to paste the
+pre-captured JSON in the query editor.
+The **{searchprofiler}** will detect that you
+have entered a JSON response (rather than a query) and will render just the visualization,
+rather than querying the cluster.
+ +To see how this works, copy and paste the following profile response into the +query editor and select **Profile**. + +[source,js] +---- +{ + "took": 3, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 1.3862944, + "hits": [ + { + "_index": "test", + "_type": "test", + "_id": "AVi3aRDmGKWpaS38wV57", + "_score": 1.3862944, + "_source": { + "name": "fred", + "age": 69, + "hair": "blonde" + } + } + ] + }, + "profile": { + "shards": [ + { + "id": "[O-l25nM4QN6Z68UA5rUYqQ][test][0]", + "searches": [ + { + "query": [ + { + "type": "BooleanQuery", + "description": "+name:fred #(ConstantScore(*:*))^0.0", + "time": "0.5884370000ms", + "breakdown": { + "score": 7243, + "build_scorer_count": 1, + "match_count": 0, + "create_weight": 196239, + "next_doc": 9851, + "match": 0, + "create_weight_count": 1, + "next_doc_count": 2, + "score_count": 1, + "build_scorer": 375099, + "advance": 0, + "advance_count": 0 + }, + "children": [ + { + "type": "TermQuery", + "description": "name:fred", + "time": "0.3016880000ms", + "breakdown": { + "score": 4218, + "build_scorer_count": 1, + "match_count": 0, + "create_weight": 132425, + "next_doc": 2196, + "match": 0, + "create_weight_count": 1, + "next_doc_count": 2, + "score_count": 1, + "build_scorer": 162844, + "advance": 0, + "advance_count": 0 + } + }, + { + "type": "BoostQuery", + "description": "(ConstantScore(*:*))^0.0", + "time": "0.1223030000ms", + "breakdown": { + "score": 0, + "build_scorer_count": 1, + "match_count": 0, + "create_weight": 17366, + "next_doc": 0, + "match": 0, + "create_weight_count": 1, + "next_doc_count": 0, + "score_count": 0, + "build_scorer": 102329, + "advance": 2604, + "advance_count": 2 + }, + "children": [ + { + "type": "MatchAllDocsQuery", + "description": "*:*", + "time": "0.03307600000ms", + "breakdown": { + "score": 0, + "build_scorer_count": 1, + "match_count": 0, + "create_weight": 6068, + "next_doc": 0, + "match": 0, 
+ "create_weight_count": 1, + "next_doc_count": 0, + "score_count": 0, + "build_scorer": 25615, + "advance": 1389, + "advance_count": 2 + } + } + ] + } + ] + } + ], + "rewrite_time": 168640, + "collector": [ + { + "name": "CancellableCollector", + "reason": "search_cancelled", + "time": "0.02952900000ms", + "children": [ + { + "name": "SimpleTopScoreDocCollector", + "reason": "search_top_hits", + "time": "0.01931700000ms" + } + ] + } + ] + } + ], + "aggregations": [] + } + ] + } +} +---- + +// NOTCONSOLE + +Your output should look similar to this: + +[role="screenshot"] +image::images/profiler-json.png[Rendering pre-captured profiler JSON] diff --git a/serverless/pages/profile-queries-and-aggregations.mdx b/serverless/pages/devtools/profile-queries-and-aggregations.mdx similarity index 100% rename from serverless/pages/profile-queries-and-aggregations.mdx rename to serverless/pages/devtools/profile-queries-and-aggregations.mdx diff --git a/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc b/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc new file mode 100644 index 0000000000..65f9522553 --- /dev/null +++ b/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc @@ -0,0 +1,215 @@ +[[run-api-requests-in-the-console]] += Console + +:description: Use the Console to interact with Elastic REST APIs. +:keywords: serverless, dev tools, how-to + +preview:[] + +This content applies to: + +**Console** lets you interact with https://www.elastic.co/docs/api[Elasticsearch and Kibana serverless APIs] from your project. + +Requests are made in the left pane, and responses are displayed in the right pane. + +[role="screenshot"] +image::images/console.png[Console request/response pair] + +To go to **Console**, find **Dev Tools** in the navigation menu or use the global search bar. + +You can also find Console directly on your Elasticsearch serverless project pages, where you can expand it from the footer. 
This Console, called **Persistent Console**, has the same capabilities and shares the same history as the Console in **Dev Tools**. + +[discrete] +[[run-api-requests-in-the-console-write-requests]] +== Write requests + +**Console** understands commands in a cURL-like syntax. +For example, the following is a `GET` request to the {es} `_search` API. + +[source,js] +---- +GET /_search +{ + "query": { + "match_all": {} + } +} +---- + +Here is the equivalent command in cURL: + +[source,bash] +---- +curl "${ES_URL}/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "query": { + "match_all": {} + } +}' +---- + +[discrete] +[[run-api-requests-in-the-console-autocomplete]] +=== Autocomplete + +When you're typing a command, **Console** makes context-sensitive suggestions. +These suggestions show you the parameters for each API and speed up your typing. + +You can configure your preferences for autocomplete in the <>. + +[discrete] +[[run-api-requests-in-the-console-comments]] +=== Comments + +You can write comments or temporarily disable parts of a request by using double forward slashes (`//`) or pound (`#`) signs to create single-line comments. + +[source,js] +---- +# This request searches all of your indices. +GET /_search +{ + // The query parameter indicates query context. + "query": { + "match_all": {} // Matches all documents. + } +} +---- + +You can also use a forward slash followed by an asterisk (`/*`) to mark the beginning of multi-line +comments. +An asterisk followed by a forward slash (`*/`) marks the end. + +[source,js] +---- +GET /_search +{ + "query": { + /*"match_all": { + "boost": 1.2 + }*/ + "match_none": {} + } +} +---- + +[discrete] +[[run-api-requests-in-the-console-variables]] +=== Variables + +Select **Variables** to create, edit, and delete variables. + +[role="screenshot"] +image::images/variables.png[Variables] + +You can refer to these variables in the paths and bodies of your requests. 
+Each variable can be referenced multiple times. + +[source,js] +---- +GET ${pathVariable} +{ + "query": { + "match": { + "${bodyNameVariable}": "${bodyValueVariable}" + } + } +} +---- + +By default, variables in the body may be substituted as a boolean, number, array, or +object by removing nearby quotes instead of a string with surrounding quotes. Triple +quotes overwrite this default behavior and enforce simple replacement as a string. + +[discrete] +[[run-api-requests-in-the-console-auto-formatting]] +=== Auto-formatting + +The auto-formatting +capability can help you format requests to be more readable. Select one or more requests that you +want to format, open the contextual menu, and then select **Auto indent**. + +[discrete] +[[run-api-requests-in-the-console-keyboard-shortcuts]] +=== Keyboard shortcuts + +**Go to line number**: `Ctrl/Cmd` + `L` + +**Auto-indent current request**: `Ctrl/Cmd` + `I` + +**Jump to next request end**: `Ctrl/Cmd` + `↓` + +**Jump to previous request end**: `Ctrl/Cmd` + `↑` + +**Open documentation for current request**: `Ctrl/Cmd` + `/` + +**Run current request**: `Ctrl/Cmd` + `Enter` + +**Apply current or topmost term in autocomplete menu**: `Enter` or `Tab` + +**Close autocomplete menu**: `Esc` + +**Navigate items in autocomplete menu**: `↓` + `↑` + +[discrete] +[[run-api-requests-in-the-console-view-api-docs]] +=== View API docs + +To view the documentation for an API endpoint, select the request, then open the contextual menu and select +_Open API reference_. + +[discrete] +[[run-api-requests-in-the-console-run-requests]] +== Run requests + +When you're ready to submit the request, select the play button. + +The result of the request execution is displayed in the response panel, where you can see: + +* the JSON response +* the HTTP status code corresponding to the request +* The execution time, in ms. + +You can select multiple requests and submit them together. +**Console** executes the requests one by one. 
Submitting multiple requests is helpful
+when you're debugging an issue or trying query
+combinations in multiple scenarios.
+
+[discrete]
+[[run-api-requests-in-the-console-import-and-export-requests]]
+== Import and export requests
+
+You can export requests:
+
+* **to a TXT file**, by using the **Export requests** button. When using this method, all content of the input panel is copied, including comments, requests, and payloads. All of the formatting is preserved and allows you to re-import the file later, or to a different environment, using the **Import requests** button.
++
+[TIP]
+====
+When importing a TXT file containing Console requests, the current content of the input panel is replaced. Export it first if you don't want to lose it, or find it in the **History** tab if you already ran the requests.
+====
+* by copying them individually as **curl**, **JavaScript**, or **Python**. To do this, select a request, then open the contextual menu and select **Copy as**. When using this action, requests are copied individually to your clipboard. You can save your favorite language to make the copy action faster the next time you use it.
++
+When running copied requests from an external environment, you'll need to add https://www.elastic.co/docs/api/doc/serverless/authentication[authentication information] to the request.
+
+[discrete]
+[[run-api-requests-in-the-console-get-your-request-history]]
+== Get your request history
+
+**Console** maintains a list of the last 500 requests that you tried to execute.
+To view them, open the **History** tab.
+
+You can run a request from your history again by selecting the request and clicking **Add and run**. If you want to add it back to the Console input panel without running it yet, click **Add** instead. It is added to the editor at the current cursor position.
+ +[discrete] +[[run-api-requests-in-the-console-configure-console-settings]] +== Configure Console settings + +Go to the **Config** tab of **Console** to customize its display, autocomplete, and accessibility settings. + +[discrete] +[[run-api-requests-in-the-console-disable-console]] +== Disable Console + +You can disable the persistent console that shows in the footer of your {es} project pages. To do that, go to **Management** > **Advanced Settings**, and turn off the `devTools:enablePersistentConsole` setting. diff --git a/serverless/pages/run-api-requests-in-the-console.mdx b/serverless/pages/devtools/run-api-requests-in-the-console.mdx similarity index 100% rename from serverless/pages/run-api-requests-in-the-console.mdx rename to serverless/pages/devtools/run-api-requests-in-the-console.mdx diff --git a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc new file mode 100644 index 0000000000..e92fcd0509 --- /dev/null +++ b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc @@ -0,0 +1,235 @@ +[[api-conventions]] += Elasticsearch API conventions + +:description: The {es} REST APIs have conventions for headers and request bodies. +:keywords: serverless, elasticsearch, API, reference + +preview:[] + +You can run {es} API requests in **{dev-tools-app} → Console**. +For example: + +[source,shell] +---- +GET _cat/indices?v=true +---- + +Check out https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[]. + +[discrete] +[[api-conventions-request-headers]] +== Request headers + +When you call {es} APIs outside of the Console, you must provide a request header. +The {es} APIs support the `Authorization`, `Content-Type`, and `X-Opaque-Id` headers. + +[discrete] +[[api-conventions-authorization]] +=== Authorization + +{es} APIs use key-based authentication. 
+You must create an API key and use the encoded value in the request header. +For example: + +[source,bash] +---- +curl -X GET "${ES_URL}/_cat/indices?v=true" \ + -H "Authorization: ApiKey ${API_KEY}" +---- + +To get API keys or the Elasticsearch Endpoint (`${ES_URL}`) for a project, refer to <>. + +[discrete] +[[api-conventions-content-type]] +=== Content-type + +The type of the content sent in a request body must be specified using the `Content-Type` header. +For example: + +[source,bash] +---- +curl -X GET "${ES_URL}/_search?pretty" \ + -H "Authorization: ApiKey ${API_KEY}" \ + -H "Content-Type: application/json" \ + -d ' + { + "query": { + "match_all": { "boost" : 1.2 } + } + } +' +---- + +The value of this header must map to one of the formats that the API supports. +Most APIs support JSON, YAML, CBOR, and SMILE. +The bulk and multi-search APIs support NDJSON, JSON, and SMILE; other types will result in an error response. + +If you use the `source` query string parameter, you must specify the content type with the `source_content_type` query string parameter. + +{es} APIs support only UTF-8-encoded JSON. +Any other encoding headings sent with a request are ignored. +Responses are also UTF-8 encoded. + +//// +/* +TBD: Is this something you specify in the request header or find in the response header? +### Traceparent + +{es} APIs support a `traceparent` HTTP header using the [official W3C trace context spec](https://www.w3.org/TR/trace-context/#traceparent-header). +You can use the `traceparent` header to trace requests across Elastic products and other services. +Because it's used only for traces, you can safely generate a unique `traceparent` header for each request. 
+ +{es} APIs surface the header's `trace-id` value as `trace.id` in the: + +* JSON {es} server logs +* Slow logs +* Deprecation logs + +For example, a `traceparent` value of `00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01` would produce the following +`trace.id` value in the logs: `0af7651916cd43dd8448eb211c80319c`. +*/ +//// + +[discrete] +[[api-conventions-x-opaque-id]] +=== X-Opaque-Id + +You can pass an `X-Opaque-Id` HTTP header to track the origin of a request in {es} logs and tasks. +For example: + +[source,bash] +---- +curl -X GET "${ES_URL}/_search?pretty" \ + -H "Authorization: ApiKey ${API_KEY}" \ + -H "Content-Type: application/json" \ + -H "X-Opaque-Id: 123456" \ + -d ' + { + "query": { + "match_all": { "boost" : 1.2 } + } + } +' +---- + +{es} surfaces the `X-Opaque-Id` value in the: + +* Response of any request that includes the header +* Task management API response +* Slow logs +* Deprecation logs + +//// +/* MISSING LINKS +* Response of any request that includes the header +* \<\<_identifying_running_tasks,Task management API>> response +* \<\<_identifying_search_slow_log_origin,Slow logs>> +* missing link{/* Deprecation logs +*/ +//// + +For the deprecation logs, {es} also uses the `X-Opaque-Id` value to throttle and deduplicate deprecation warnings. + +//// +/* MISSING LINKS +See \<\<_deprecation_logs_throttling>>. +*/ +//// + +The `X-Opaque-Id` header accepts any arbitrary value. +However, it is recommended that you limit these values to a finite set, such as an ID per client. +Don't generate a unique `X-Opaque-Id` header for every request. +Too many unique `X-Opaque-Id` values can prevent {es} from deduplicating warnings in the deprecation logs. + +[discrete] +[[api-conventions-request-bodies]] +== Request bodies + +A number of {es} APIs with GET operations--most notably the search API--support a request body. 
+While the GET operation makes sense in the context of retrieving information, GET requests with a body are not supported by all HTTP libraries. + +All {es} APIs with GET operations that require a body can also be submitted as POST requests. +Alternatively, you can pass the request body as the `source` query string parameter when using GET. +When you use this method, the `source_content_type` parameter should also be passed with a media type value that indicates the format of the source, such as `application/json`. + +//// +/* +TBD: The examples in this section don't current seem to work. +Error: no handler found for uri [.../_search?pretty=true] and method [GET]" + +## Date math + +Most {es} APIs that accept an index or index alias argument support date math. +Date math name resolution enables you to search a range of time series indices or index aliases rather than searching all of your indices and filtering the results. +Limiting the number of searched indices reduces cluster load and improves search performance. +For example, if you are searching for errors in your daily logs, you can use a date math name template to restrict the search to the past two days. + +A date math name takes the following form: + +```txt + +``` +- `static_name` is static text. +- `date_math_expr` is a dynamic date math expression that computes the date dynamically. +- `date_format` is the optional format in which the computed date should be rendered. Defaults to `yyyy.MM.dd`. The format should be compatible with [java-time](https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html). +- `time_zone` is the optional time zone. Defaults to `UTC`. + + +For example, `mm` denotes the minute of the hour, while `MM` denotes the month of the year. +Similarly `hh` denotes the hour in the `1-12` range in combination with `AM/PM`, while `HH` denotes the hour in the `0-23` 24-hour range. + + +Date math expressions are resolved independent of the locale. 
+Consequently, you cannot use any calendars other than the Gregorian calendar. + +You must enclose date math names in angle brackets. +If you use the name in a request path, special characters must be URI encoded. +For example, `` is encoded as `%3Cmy-index-%7Bnow%2Fd%7D%3E`. + +The special characters used for date rounding must be URI encoded. +For example: + +| | | +|---|---| +| `<` | `%3C` | +| `>` | `%3E` | +| `/` | `%2F` | +| `{` | `%7B` | +| `}` | `%7D` | +| `\|` | `%7C` | +| `+` | `%2B` | +| `:` | `%3A` | +| `,` | `%2C` | + +The following example shows different forms of date math names and the final names they resolve to given the current time is 22nd March 2024 noon UTC: + +| Expression | Resolves to | +|---|---| +| `` | `logstash-2024.03.22` | +| `` | `logstash-2024.03.01` | +| `` | `logstash-2024.03` | +| `` | `logstash-2024.02` | +| `` | `logstash-2024.03.23` | + +To use the characters `{` and `}` in the static part of a name template, escape them with a backslash `\`. +For example, `` resolves to `elastic{ON}-2024.03.01` + +The following example shows a search request that searches the {ls} indices for the past three days, assuming the indices use the default {ls} index name format (`logstash-YYYY.MM.dd`): + +```console +# ,, +curl -X GET "${ES_URL}/%3Clogstash-%7Bnow%2Fd-2d%7D%3E%2C%3Clogstash-%7Bnow%2Fd-1d%7D%3E%2C%3Clogstash-%7Bnow%2Fd%7D%3E/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "query" : { + "match": { + "test": "data" + } + } +} +' +``` +*/ +//// diff --git a/serverless/pages/apis-elasticsearch-conventions.mdx b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.mdx similarity index 100% rename from serverless/pages/apis-elasticsearch-conventions.mdx rename to serverless/pages/elasticsearch/apis-elasticsearch-conventions.mdx diff --git a/serverless/pages/elasticsearch/apis-http-apis.asciidoc b/serverless/pages/elasticsearch/apis-http-apis.asciidoc new file mode 100644 index 
0000000000..d619057cf0 --- /dev/null +++ b/serverless/pages/elasticsearch/apis-http-apis.asciidoc @@ -0,0 +1,25 @@ +[[http-apis]] += REST APIs + +:description: {es} and {kib} expose REST APIs that can be called directly to configure and access {stack} features. +:keywords: serverless, elasticsearch, http, rest, overview + +preview:[] + + diff --git a/serverless/pages/apis-http-apis.mdx b/serverless/pages/elasticsearch/apis-http-apis.mdx similarity index 100% rename from serverless/pages/apis-http-apis.mdx rename to serverless/pages/elasticsearch/apis-http-apis.mdx diff --git a/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc b/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc new file mode 100644 index 0000000000..443521e70c --- /dev/null +++ b/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc @@ -0,0 +1,88 @@ +[[kibana-api-conventions]] += Management API conventions + +:description: The Management APIs for {serverless-short} have request header conventions. +:keywords: serverless, kibana, API, reference + +preview:[] + +The Management REST APIs for {serverless-full} let you manage resources that are available in multiple solutions. +These resources include connectors, data views, and saved objects. +If you've previously used the {stack}, the Management APIs are similar to {kib} APIs. + +Management API calls are stateless. +Each request that you make happens in isolation from other calls and must include all of the necessary information for {kib} to fulfill the request. +API requests return JSON output, which is a format that is machine-readable and works well for automation. + +To interact with Management APIs, use the following operations: + +* GET: Fetches the information. +* POST: Adds new information. +* PUT: Updates the existing information. +* DELETE: Removes the information. + +You can prepend any Management API endpoint with `kbn:` and run the request in **{dev-tools-app} → Console**. 
+For example: + +[source,shell] +---- +GET kbn:/api/data_views +---- + +Check out https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[]. + +[discrete] +[[kibana-api-conventions-request-headers]] +== Request headers + +When you call Management APIs outside of the Console, you must provide a request header. +The Management APIs support the `Authorization`, `Content-Type`, and `kbn-xsrf` headers. + +`Authorization: ApiKey`:: + +Management APIs use key-based authentication. +You must create an API key and use the encoded value in the request header. +To learn about creating keys, go to https://www.elastic.co/docs/current/serverless/api-keys[]. + +`Content-Type: application/json`:: + +You must use this header when you send a payload in the API request. +Typically, if you include the `kbn-xsrf` header, you must also include the `Content-Type` header. + +`kbn-xsrf: true`:: + +You must use this header for all API calls except `GET` or `HEAD` operations. + +//// +/* +TBD: Are these settings accessible to users in serverless projects? 
+ +This header is also not required when: +* The path is allowed using the `server.xsrf.allowlist` setting +* XSRF protections are disabled using the `server.xsrf.disableProtection` setting +*/ +//// + +For example: + +[source,bash] +---- +curl -X POST \ + "${KIBANA_URL}/api/data_views/data_view" \ + -H "Authorization: ApiKey ${API_KEY}" \ + -H 'Content-Type: application/json' \ + -H 'kbn-xsrf: true' \ + -d '{ + "data_view": { + "title": "books*", + "name": "My Books Data View" + } + } +' +---- + +//// +/* +TBD: Add instructions for how to obtain the KIBANA_URL +*/ +//// diff --git a/serverless/pages/apis-kibana-conventions.mdx b/serverless/pages/elasticsearch/apis-kibana-conventions.mdx similarity index 100% rename from serverless/pages/apis-kibana-conventions.mdx rename to serverless/pages/elasticsearch/apis-kibana-conventions.mdx diff --git a/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc new file mode 100644 index 0000000000..faff9202b2 --- /dev/null +++ b/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc @@ -0,0 +1,144 @@ +[[dot-net-client-getting-started]] += Get started with the serverless .NET client + +:description: Set up and use the .NET client for {es3}. +:keywords: serverless, elasticsearch, .net, how to + +preview:[] + +This page guides you through the installation process of the +.NET client for {es3}, shows you how to initialize the client, and how to perform basic +{es} operations with it. + +[discrete] +[[dot-net-client-getting-started-requirements]] +== Requirements + +* .NET Core, .NET 5+ or .NET Framework (4.6.1 and higher). 
+ +[discrete] +[[dot-net-client-getting-started-installation]] +== Installation + +You can install the .NET client with the following command: + +[source,bash] +---- +dotnet add package Elastic.Clients.Elasticsearch.Serverless +---- + +[discrete] +[[dot-net-client-getting-started-initialize-the-client]] +== Initialize the client + +Initialize the client using your API key and Elasticsearch Endpoint: + +[source,net] +---- +var client = new ElasticsearchClient("", new ApiKey("")); +---- + +To get API keys or the Elasticsearch Endpoint for a project, see <>. + +[discrete] +[[dot-net-client-getting-started-using-the-api]] +== Using the API + +After you've initialized the client, you can create an index and start ingesting +documents. + +[discrete] +[[dot-net-client-getting-started-creating-an-index-and-ingesting-documents]] +=== Creating an index and ingesting documents + +The following is an example of creating a `my_index` index: + +[source,net] +---- +var response = await client.Indices.CreateAsync("my_index"); +---- + +This is a simple way of indexing a document into `my_index`: + +[source,net] +---- +var doc = new MyDoc +{ + Id = 1, + User = "xyz_user", + Message = "Trying out the client, so far so good?" 
+}; + +var response = await client.IndexAsync(doc, "my_index"); +---- + +[discrete] +[[dot-net-client-getting-started-getting-documents]] +=== Getting documents + +You can get documents by using the following code: + +[source,net] +---- +var response = await client.GetAsync(id, idx => idx.Index("my_index")); + +if (response.IsValidResponse) +{ + var doc = response.Source; +} +---- + +[discrete] +[[dot-net-client-getting-started-searching]] +=== Searching + +This is how you can create a single match query with the .NET client: + +[source,net] +---- +var response = await client.SearchAsync(s => s + .Index("my_index") + .From(0) + .Size(10) + .Query(q => q + .Term(t => t.User, "flobernd") + ) +); + +if (response.IsValidResponse) +{ + var doc = response.Documents.FirstOrDefault(); +} +---- + +[discrete] +[[dot-net-client-getting-started-updating-a-document]] +=== Updating a document + +This is how you can update a document, for example to add a new field: + +[source,net] +---- +doc.Message = "This is a new message"; + +var response = await client.UpdateAsync("my_index", 1, u => u + .Doc(doc)); +---- + +[discrete] +[[dot-net-client-getting-started-deleting-a-document]] +=== Deleting a document + +[source,net] +---- +var response = await client.DeleteAsync("my_index", 1); +---- + +[discrete] +[[dot-net-client-getting-started-deleting-an-index]] +=== Deleting an index + +[source,net] +---- +var response = await client.Indices.DeleteAsync("my_index"); +---- diff --git a/serverless/pages/clients-dot-net-getting-started.mdx b/serverless/pages/elasticsearch/clients-dot-net-getting-started.mdx similarity index 100% rename from serverless/pages/clients-dot-net-getting-started.mdx rename to serverless/pages/elasticsearch/clients-dot-net-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc new file mode 100644 index 0000000000..9d3e782e6f --- /dev/null +++ 
b/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc
@@ -0,0 +1,236 @@
+[[go-client-getting-started]]
+= Get started with the serverless Go client
+
+:description: Set up and use the Go client for {es3}.
+:keywords: serverless, elasticsearch, go, how to
+
+preview:[]
+
+This page guides you through the installation process of the Go
+client for {es3}, shows you how to initialize the client, and how to perform basic
+{es} operations with it.
+
+[discrete]
+[[go-client-getting-started-requirements]]
+== Requirements
+
+* Go 1.20 or higher installed on your system.
+
+[discrete]
+[[go-client-getting-started-installation]]
+== Installation
+
+[discrete]
+[[go-client-getting-started-using-the-command-line]]
+=== Using the command line
+
+You can install the Go client with the following
+commands:
+
+[source,bash]
+----
+go get -u github.com/elastic/elasticsearch-serverless-go@latest
+----
+
+[discrete]
+[[go-client-getting-started-imports]]
+== Imports
+
+The following snippets use these imports:
+
+[source,go]
+----
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "log"
+ "strconv"
+
+ "github.com/elastic/elasticsearch-serverless-go"
+ "github.com/elastic/elasticsearch-serverless-go/typedapi/types"
+ "github.com/elastic/elasticsearch-serverless-go/typedapi/types/enums/result"
+)
+----
+
+[discrete]
+[[go-client-getting-started-initialize-the-client]]
+== Initialize the client
+
+Initialize the client using your API key and Elasticsearch Endpoint:
+
+[source,go]
+----
+client, err := elasticsearch.NewClient(elasticsearch.Config{
+ APIKey: "your_api_key",
+ Address: "https://my-project-url",
+})
+if err != nil {
+ log.Fatal(err)
+}
+----
+
+To get API keys or the Elasticsearch Endpoint for a project, see <>.
+
+[discrete]
+[[go-client-getting-started-using-the-api]]
+== Using the API
+
+After you've initialized the client, you can start ingesting documents. You can
+use the `bulk` API for this.
This API enables you to index, update, and delete +several documents in one request. + +[discrete] +[[go-client-getting-started-creating-an-index-and-ingesting-documents]] +=== Creating an index and ingesting documents + +You can call the `bulk` API with a body parameter, an array of hashes that +define the action, and a document. + +The following is an example of indexing some classic books into the `books` +index: + +[source,go] +---- +type Book struct { + Name string `json:"name"` + Author string `json:"author"` + ReleaseDate string `json:"release_date"` + PageCount int `json:"page_count"` +} + +books := []Book{ + {Name: "Snow Crash", Author: "Neal Stephenson", ReleaseDate: "1992-06-01", PageCount: 470}, + {Name: "Revelation Space", Author: "Alastair Reynolds", ReleaseDate: "2000-03-15", PageCount: 585}, + {Name: "1984", Author: "George Orwell", ReleaseDate: "1949-06-08", PageCount: 328}, + {Name: "Fahrenheit 451", Author: "Ray Bradbury", ReleaseDate: "1953-10-15", PageCount: 227}, + {Name: "Brave New World", Author: "Aldous Huxley", ReleaseDate: "1932-06-01", PageCount: 268}, + {Name: "The Handmaid's Tale", Author: "Margaret Atwood", ReleaseDate: "1985-06-01", PageCount: 311}, +} +indexName := "books" + +bulk := client.Bulk() +for i, book := range books { + id := strconv.Itoa(i) + err := bulk.CreateOp(types.CreateOperation{Index_: &indexName, Id_: &id}, book) + if err != nil { + log.Fatal(err) + } +} +bulkRes, err := bulk.Do(context.TODO()) +if err != nil { + log.Fatal(err) +} + +fmt.Printf("Bulk: %#v\n", bulkRes.Items) +---- + +When you use the client to make a request to {es}, it returns an API +response object. You can access the body values directly as seen on +the previous example with `bulkRes`. 
+ +[discrete] +[[go-client-getting-started-getting-documents]] +=== Getting documents + +You can get documents by using the following code: + +[source,go] +---- +getRes, err := client.Get(indexName, "5").Do(context.TODO()) +if err != nil { + log.Fatal(err) +} +book := Book{} +if err := json.Unmarshal(getRes.Source_, &book); err != nil { + log.Fatal(err) +} +fmt.Printf("Get book: %#v\n", book) +---- + +[discrete] +[[go-client-getting-started-searching]] +=== Searching + +Now that some data is available, you can search your documents using the +`search` API: + +[source,go] +---- +searchRes, err := client.Search(). + Index("books"). + Q("snow"). + Do(context.TODO()) +if err != nil { + log.Fatal(err) +} + +bookSearch := []Book{} +for _, hit := range searchRes.Hits.Hits { + book := Book{} + if err := json.Unmarshal(hit.Source_, &book); err != nil { + log.Fatal(err) + } + bookSearch = append(bookSearch, book) +} +fmt.Printf("Search books: %#v\n", bookSearch) +---- + +[discrete] +[[go-client-getting-started-updating-a-document]] +=== Updating a document + +You can call the `Update` API to update a document, in this example updating the +`page_count` for "The Handmaid's Tale" with id "5": + +[source,go] +---- +updateRes, err := client.Update("books", "5"). + Doc( + struct { + PageCount int `json:"page_count"` + }{PageCount: 312}, + ). 
+ Do(context.TODO()) +if err != nil { + log.Fatal(err) +} + +if updateRes.Result == result.Updated { + fmt.Printf("Update book: %#v\n", updateRes) +} +---- + +[discrete] +[[go-client-getting-started-deleting-a-document]] +=== Deleting a document + +You can call the `Delete` API to delete a document: + +[source,go] +---- +deleteRes, err := client.Delete("books", "5").Do(context.TODO()) +if err != nil { + log.Fatal(err) +} + +if deleteRes.Result == result.Deleted { + fmt.Printf("Delete book: %#v\n", deleteRes) +} +---- + +[discrete] +[[go-client-getting-started-deleting-an-index]] +=== Deleting an index + +[source,go] +---- +indexDeleteRes, err := client.Indices.Delete("books").Do(context.TODO()) +if err != nil { + log.Fatal(err) +} + +if indexDeleteRes.Acknowledged { + fmt.Printf("Delete index: %#v\n", indexDeleteRes) +} +---- diff --git a/serverless/pages/clients-go-getting-started.mdx b/serverless/pages/elasticsearch/clients-go-getting-started.mdx similarity index 100% rename from serverless/pages/clients-go-getting-started.mdx rename to serverless/pages/elasticsearch/clients-go-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc new file mode 100644 index 0000000000..d04d1a4291 --- /dev/null +++ b/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc @@ -0,0 +1,195 @@ +[[java-client-getting-started]] += Get started with the serverless Java client + +:description: Set up and use the Java client for {es3}. +:keywords: serverless, elasticsearch, java, how to + +preview:[] + +This page guides you through the installation process of the Java +client for {es3}, shows you how to initialize the client, and how to perform basic +{es} operations with it. + +[discrete] +[[java-client-getting-started-requirements]] +== Requirements + +* Java 8 or later. 
+* A JSON object mapping library to allow seamless integration of +your application classes with the {es} API. The examples below +show usage with Jackson. + +[discrete] +[[java-client-getting-started-installation]] +== Installation + +You can add the Java client to your Java project using +either Gradle or Maven. + +[discrete] +[[java-client-getting-started-using-gradle]] +=== Using Gradle + +You can install the Java client as a Gradle dependency: + +[source,groovy] +---- +dependencies { + implementation 'co.elastic.clients:elasticsearch-java-serverless:1.0.0-20231031' + implementation 'com.fasterxml.jackson.core:jackson-databind:2.17.0' +} +---- + +[discrete] +[[java-client-getting-started-using-maven]] +=== Using Maven + +You can install the Java client as a Maven dependency, add +the following to the `pom.xml` of your project: + +[source,xml] +---- + + + + + co.elastic.clients + elasticsearch-java-serverless + 1.0.0-20231031 + + + + com.fasterxml.jackson.core + jackson-databind + 2.17.0 + + + + +---- + +[discrete] +[[java-client-getting-started-initialize-the-client]] +== Initialize the client + +Initialize the client using your API key and Elasticsearch Endpoint: + +[source,java] +---- +// URL and API key +String serverUrl = "https://...elastic.cloud"; +String apiKey = "VnVhQ2ZHY0JDZGJrU..."; + +// Create the low-level client +RestClient restClient = RestClient + .builder(HttpHost.create(serverUrl)) + .setDefaultHeaders(new Header[]{ + new BasicHeader("Authorization", "ApiKey " + apiKey) + }) + .build(); + +// Create the transport with a Jackson mapper +ElasticsearchTransport transport = new RestClientTransport( + restClient, new JacksonJsonpMapper()); + +// And create the API client +ElasticsearchClient esClient = new ElasticsearchClient(transport); +---- + +To get API keys or the Elasticsearch Endpoint for a project, see <>. 
+
+[discrete]
+[[java-client-getting-started-using-the-api]]
+== Using the API
+
+After you've initialized the client, you can start ingesting documents.
+
+[discrete]
+[[java-client-getting-started-creating-an-index-and-ingesting-documents]]
+=== Creating an index and ingesting documents
+
+The following is an example of indexing a document, here a `Product` application
+object in the `products` index:
+
+[source,java]
+----
+Product product = new Product("bk-1", "City bike", 123.0);
+
+IndexResponse response = esClient.index(i -> i
+ .index("products")
+ .id(product.getSku())
+ .document(product)
+);
+
+logger.info("Indexed with version " + response.version());
+----
+
+[discrete]
+[[java-client-getting-started-searching]]
+=== Searching
+
+Now that some data is available, you can search your documents using the
+`search` API:
+
+[source,java]
+----
+String searchText = "bike";
+
+SearchResponse<Product> response = esClient.search(s -> s
+ .index("products")
+ .query(q -> q
+ .match(t -> t
+ .field("name")
+ .query(searchText)
+ )
+ ),
+ Product.class
+);
+----
+
+A few things to note in the above example:
+
+* The search query is built using a hierarchy of lambda expressions that closely
+follows the {es} HTTP API. Lambda expressions allow you to be guided
+by your IDE's autocompletion, without having to import (or even know!) the
+actual classes representing a query.
+* The last parameter `Product.class` instructs the client to return results as
+`Product` application objects instead of raw JSON.
+ +[discrete] +[[java-client-getting-started-updating]] +=== Updating + +You can update your documents using the `update` API: + +[source,java] +---- +Product product = new Product("bk-1", "City bike", 123.0); + +esClient.update(u -> u + .index("products") + .id("bk-1") + .upsert(product), + Product.class +); +---- + +[discrete] +[[java-client-getting-started-delete]] +=== Delete + +You can also delete documents: + +[source,java] +---- +esClient.delete(d -> d.index("products").id("bk-1")); +---- + +[discrete] +[[java-client-getting-started-deleting-an-index]] +=== Deleting an index + +[source,java] +---- +esClient.indices().delete(d -> d.index("products")); +---- diff --git a/serverless/pages/clients-java-getting-started.mdx b/serverless/pages/elasticsearch/clients-java-getting-started.mdx similarity index 100% rename from serverless/pages/clients-java-getting-started.mdx rename to serverless/pages/elasticsearch/clients-java-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc new file mode 100644 index 0000000000..7895e007a9 --- /dev/null +++ b/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc @@ -0,0 +1,177 @@ +[[nodejs-client-getting-started]] += Get started with the serverless Node.js client + +:description: Set up and use the Node.js client for {es3}. +:keywords: serverless, elasticsearch, nodejs, how to + +preview:[] + +This page guides you through the installation process of the Node.js +client for {es3}, shows you how to initialize the client, and how to perform basic +{es} operations with it. + +[discrete] +[[nodejs-client-getting-started-requirements]] +== Requirements + +* Node.js 16 or higher installed on your system. 
+ +[discrete] +[[nodejs-client-getting-started-installation]] +== Installation + +[discrete] +[[nodejs-client-getting-started-using-the-command-line]] +=== Using the command line + +You can install the Node.js client with the following +commands: + +[source,bash] +---- +npm install @elastic/elasticsearch-serverless +---- + +[discrete] +[[nodejs-client-getting-started-initialize-the-client]] +== Initialize the client + +Initialize the client using your API key and Elasticsearch Endpoint: + +[source,js] +---- +const { Client } = require('@elastic/elasticsearch-serverless') +const client = new Client({ + node: 'https://', // serverless project URL + auth: { apiKey: 'your_api_key' }, // project API key +}) +---- + +To get API keys or the URL for a project, see <>. + +[discrete] +[[nodejs-client-getting-started-using-the-api]] +== Using the API + +After you've initialized the client, you can start ingesting documents. +You can use the `bulk` API for this. +This API enables you to index, update, and delete several documents in one request. + +[discrete] +[[nodejs-client-getting-started-creating-an-index-and-ingesting-documents]] +=== Creating an index and ingesting documents + +You can call the `bulk` helper API with a list of documents and a handler for +what action to perform on each document. 
+ +The following is an example of bulk indexing some classic books into the `books` +index: + +[source,js] +---- +// First we build our data: +const body = [ + {name: "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470}, + {name: "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585}, + {name: "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328}, + {name: "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227}, + {name: "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268}, + {name: "The Handmaid's Tale", "author": "Margaret Atwood", "release_date": "1985-06-01", "page_count": 311} +] + +// Then we send the data using the bulk API helper: +const result = await client.helpers.bulk({ + datasource: body, + onDocument (doc) { + // instructs the bulk indexer to add each item in `body` to the books index + // you can optionally inspect each `doc` object to alter what action is performed per document + return { + index: { _index: 'books' } + } + } +}) +---- + +[discrete] +[[nodejs-client-getting-started-getting-documents]] +=== Getting documents + +You can get documents by using the following code: + +[source,js] +---- +await client.get({ + index: 'books', + id: 'a_document_id', +}) +---- + +[discrete] +[[nodejs-client-getting-started-searching]] +=== Searching + +Now that some data is available, you can search your documents using the `search` API: + +[source,js] +---- +const result = await client.search({ + index: 'books', + query: { + match: { + author: 'ray bradbury' + } + } +}) +console.log(result.hits.hits) +---- + +[discrete] +[[nodejs-client-getting-started-updating-a-document]] +=== Updating a document + +You can call the `update` API to update a document: + +[source,js] +---- +await client.update({ + index: 'books', + id: 'a_document_id', + doc: { + author: 'S.E. 
Hinton', + new_field: 'new value' + } +}) +---- + +[discrete] +[[nodejs-client-getting-started-deleting-a-document]] +=== Deleting a document + +You can call the `delete` API to delete a document: + +[source,js] +---- +await client.delete({ + index: 'books', + id: 'a_document_id', +}) +---- + +[discrete] +[[nodejs-client-getting-started-deleting-an-index]] +=== Deleting an index + +[source,js] +---- +await client.indices.delete({ index: 'books' }) +---- + +[discrete] +[[nodejs-client-getting-started-typescript]] +== TypeScript + +The Node.js client is implemented in TypeScript. IDEs that support +TypeScript-based autocompletion should automatically find and load the +appropriate declaration files in the package's `lib` directory. +The source TypeScript can also be +https://github.com/elastic/elasticsearch-serverless-js/tree/main/src[viewed on GitHub]. diff --git a/serverless/pages/clients-nodejs-getting-started.mdx b/serverless/pages/elasticsearch/clients-nodejs-getting-started.mdx similarity index 100% rename from serverless/pages/clients-nodejs-getting-started.mdx rename to serverless/pages/elasticsearch/clients-nodejs-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc new file mode 100644 index 0000000000..94037b5d1b --- /dev/null +++ b/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc @@ -0,0 +1,212 @@ +[[php-client-getting-started]] += Get started with the serverless PHP client + +:description: Set up and use the PHP client for {es3}. +:keywords: serverless, elasticsearch, php, how to + +preview:[] + +This page guides you through the installation process of the +PHP client for {es3}, shows you how to initialize the client, and how to perform basic +{es} operations with it. + +[discrete] +[[php-client-getting-started-requirements]] +== Requirements + +* PHP 8.0 or higher installed on your system. 
+ +[discrete] +[[php-client-getting-started-installation]] +== Installation + +[discrete] +[[php-client-getting-started-using-the-command-line]] +=== Using the command line + +You can install the PHP client using +https://getcomposer.org/[composer] with the following commands: + +[source,bash] +---- +composer require elastic/elasticsearch-serverless +---- + +[discrete] +[[php-client-getting-started-initialize-the-client]] +== Initialize the client + +Initialize the client using your API key and Elasticsearch Endpoint: + +[source,php] +---- +require 'vendor/autoload.php'; + +use Elastic\Elasticsearch\Serverless\ClientBuilder; + +$client = ClientBuilder::create() + ->setEndpoint('') + ->setApiKey('') + ->build(); +---- + +To get API keys or the Elasticsearch Endpoint for a project, see <>. + +[discrete] +[[php-client-getting-started-using-the-api]] +== Using the API + +After you've initialized the client, you can start ingesting documents. You can +use the `bulk` API for this. This API enables you to index, update, and delete +several documents in one request. + +[discrete] +[[php-client-getting-started-creating-an-index-and-ingesting-documents]] +=== Creating an index and ingesting documents + +You can call the `bulk` API with a body parameter, an array of actions (index) +and documents. 
+
+The following is an example of indexing some classic books into the `books`
+index:
+
+[source,php]
+----
+$body = [
+ [ "index" => [ "_index" => "books" ]],
+ [ "name" => "Snow Crash", "author" => "Neal Stephenson", "release_date" => "1992-06-01", "page_count" => 470],
+ [ "index" => [ "_index" => "books" ]],
+ [ "name" => "Revelation Space", "author" => "Alastair Reynolds", "release_date" => "2000-03-15", "page_count" => 585],
+ [ "index" => [ "_index" => "books" ]],
+ [ "name" => "1984", "author" => "George Orwell", "release_date" => "1949-06-08", "page_count" => 328],
+ [ "index" => [ "_index" => "books" ]],
+ [ "name" => "Fahrenheit 451", "author" => "Ray Bradbury", "release_date" => "1953-10-15", "page_count" => 227],
+ [ "index" => [ "_index" => "books" ]],
+ [ "name" => "Brave New World", "author" => "Aldous Huxley", "release_date" => "1932-06-01", "page_count" => 268],
+ [ "index" => [ "_index" => "books" ]],
+ [ "name" => "The Handmaid's Tale", "author" => "Margaret Atwood", "release_date" => "1985-06-01", "page_count" => 311]
+];
+
+$response = $client->bulk(body: $body);
+# You can check the response if the items are indexed and have an ID
+print_r($response['items']);
+----
+
+When you use the client to make a request to {es}, it returns an API response
+object. This object implements the https://www.php-fig.org/psr/psr-7/[PSR-7]
+interface, which means you can check the HTTP status using the following
+method:
+
+[source,php]
+----
+print($response->getStatusCode());
+----
+
+or get the HTTP response headers using the following:
+
+[source,php]
+----
+print_r($response->getHeaders());
+----
+
+or read the HTTP response body as follows:
+
+[source,php]
+----
+print($response->getBody()->getContents());
+# or using the asString() dedicated method
+print($response->asString());
+----
+
+The response body can be accessed as an associative array or as an object.
+ +[source,php] +---- +var_dump($response['items']); # associative array +var_dump($response->items); # object +---- + +There are also methods to render the response as array, object, string and +boolean values. + +[source,php] +---- +var_dump($response->asArray()); // response body content as array +var_dump($response->asObject()); // response body content as object +var_dump($response->asString()); // response body as string (JSON) +var_dump($response->asBool()); // true if HTTP response code between 200 and 300 +---- + +[discrete] +[[php-client-getting-started-getting-documents]] +=== Getting documents + +You can get documents by using the following code: + +[source,php] +---- +$response = $client->get(index: "books", id: $id); +---- + +[discrete] +[[php-client-getting-started-searching]] +=== Searching + +You can search your documents using the `search` API: + +[source,php] +---- +# Search for all the books written by Ray Bradbury +$query = [ 'query' => [ 'match' => [ 'author' => 'Ray Bradbury' ]]]; +$response = $client->search(index: "books", body: $query); + +printf("Documents found: %d\n", $response['hits']['total']['value']); # total documents found +print_r($response['hits']['hits']); # list of books +---- + +For more information about the `search` API's query parameters and the response type, +refer to the +https://www.elastic.co/docs/api/doc/elasticsearch-serverless/group/endpoint-search[Search API] +docs. + +[discrete] +[[php-client-getting-started-updating-documents]] +=== Updating documents + +You can call the `update` API to update a document: + +[source,php] +---- +$id = ''; +# update the "page_count" value to 300 +$body = [ "doc" => [ "page_count" => 300 ]]; +$response = $client->update(index: "books", id: $id, body: $body); +printf("Operation result: %s\n", $response['result']); # You get 'updated' as a result. 
+----
+
+[discrete]
+[[php-client-getting-started-deleting-documents]]
+=== Deleting documents
+
+You can call the `delete` API to delete a document:
+
+[source,php]
+----
+$id = '';
+$response = $client->delete(index: "books", id: $id);
+printf("Operation result: %s\n", $response['result']); # You get "deleted" as a result.
+----
+
+[discrete]
+[[php-client-getting-started-deleting-an-index]]
+=== Deleting an index
+
+You can delete an entire index as follows:
+
+[source,php]
+----
+$response = $client->indices()->delete(index: "books");
+if ($response['acknowledged']) {
+    print("Index successfully removed!");
+}
+----
diff --git a/serverless/pages/clients-php-getting-started.mdx b/serverless/pages/elasticsearch/clients-php-getting-started.mdx
similarity index 100%
rename from serverless/pages/clients-php-getting-started.mdx
rename to serverless/pages/elasticsearch/clients-php-getting-started.mdx
diff --git a/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc
new file mode 100644
index 0000000000..608ff6df12
--- /dev/null
+++ b/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc
@@ -0,0 +1,157 @@
+[[python-client-getting-started]]
+= Get started with the serverless Python client
+
+:description: Set up and use the Python client for {es3}.
+:keywords: serverless, elasticsearch, python, how to
+
+preview:[]
+
+This page guides you through the installation process of the Python
+client for {es3}, shows you how to initialize the client, and how to perform basic
+{es} operations with it.
+
+[discrete]
+[[python-client-getting-started-requirements]]
+== Requirements
+
+* Python 3.7 or higher
+* https://pip.pypa.io/en/stable/[`pip`]
+
+[discrete]
+[[python-client-getting-started-documentation]]
+== Documentation
+
+Find the full documentation for the Python client on https://elasticsearch-serverless-python.readthedocs.io/en/latest/[readthedocs]. 
+ +[discrete] +[[python-client-getting-started-installation]] +== Installation + +[discrete] +[[python-client-getting-started-using-the-command-line]] +=== Using the command line + +You can install the Python client with the following +commands: + +[source,bash] +---- +python -m pip install elasticsearch-serverless +---- + +[discrete] +[[python-client-getting-started-initialize-the-client]] +== Initialize the client + +Initialize the client using your API key and Elasticsearch Endpoint: + +[source,python] +---- +from elasticsearch_serverless import Elasticsearch + +client = Elasticsearch( + "https://...", # Your project's Elasticsearch Endpoint + api_key='api-key', # API key for your project +) +---- + +To get API keys or the Elasticsearch Endpoint for a project, see <>. + +[discrete] +[[python-client-getting-started-using-the-api]] +== Using the API + +After you've initialized the client, you can start ingesting documents. You can use +the `bulk` API for this. This API enables you to index, update, and delete several +documents in one request. + +[discrete] +[[python-client-getting-started-creating-an-index-and-ingesting-documents]] +=== Creating an index and ingesting documents + +You can call the `bulk` API with a body parameter, an array of hashes that +define the action, and a document. 
+ +The following is an example of indexing some classic books into the `books` +index: + +[source,python] +---- +from datetime import datetime + +client.bulk( + body=[ + {"index": {"_index": "books", "_id": "1"}}, + {"title": "Infinite Jest", "author": "David Foster Wallace", "published_on": datetime(1996, 2, 1)}, + {"index": {"_index": "books", "_id": "2"}}, + {"title": "Ulysses", "author": "James Joyce", "published_on": datetime(1922, 2, 2)}, + {"index": {"_index": "books", "_id": "3"}}, + {"title": "Just Kids", "author": "Patti Smith", "published_on": datetime(2010, 1, 19)}, + ], +) +---- + +[discrete] +[[python-client-getting-started-getting-documents]] +=== Getting documents + +You can get documents by using the following code: + +[source,python] +---- +response = client.get(index="books", id="1") +print(response.body) +---- + +[discrete] +[[python-client-getting-started-searching]] +=== Searching + +Now that some data is available, you can search your documents using the +`search` API: + +[source,python] +---- +response = client.search(index="books", query={ + "match": { + "title": "infinite" + } +}) + +for hit in response["hits"]["hits"]: + print(hit["_source"]) +---- + +[discrete] +[[python-client-getting-started-updating-a-document]] +=== Updating a document + +You can call the `update` API to update a document: + +[source,python] +---- +client.update(index="books", id="2", doc={ + "author": "James Augustine Aloysius Joyce", + "pages": 732, +}) +---- + +[discrete] +[[python-client-getting-started-deleting-a-document]] +=== Deleting a document + +You can call the `delete` API to delete a document: + +[source,python] +---- +client.delete(index="books", id="3") +---- + +[discrete] +[[python-client-getting-started-deleting-an-index]] +=== Deleting an index + +[source,python] +---- +client.indices.delete(index="books") +---- diff --git a/serverless/pages/clients-python-getting-started.mdx b/serverless/pages/elasticsearch/clients-python-getting-started.mdx 
similarity index 100%
rename from serverless/pages/clients-python-getting-started.mdx
rename to serverless/pages/elasticsearch/clients-python-getting-started.mdx
diff --git a/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc
new file mode 100644
index 0000000000..bc085f1c0b
--- /dev/null
+++ b/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc
@@ -0,0 +1,216 @@
+[[ruby-client-getting-started]]
+= Get started with the serverless Ruby client
+
+:description: Set up and use the Ruby client for {es3}.
+:keywords: serverless, elasticsearch, ruby, how to
+
+preview:[]
+
+This page guides you through the installation process of the Ruby
+client for {es3}, shows you how to initialize the client, and how to perform basic
+{es} operations with it.
+
+[discrete]
+[[ruby-client-getting-started-requirements]]
+== Requirements
+
+* Ruby 3.0 or higher installed on your system.
+* To use the `elasticsearch-serverless` gem, you must have an API key and Elasticsearch Endpoint for an {es3} project.
+*
+
+[discrete]
+[[ruby-client-getting-started-installation]]
+== Installation
+
+[discrete]
+[[ruby-client-getting-started-from-githubs-releases]]
+=== From GitHub's releases
+
+You can install the Ruby Client from RubyGems:
+
+[source,bash]
+----
+gem install elasticsearch-serverless --pre
+----
+
+Check https://github.com/elastic/elasticsearch-serverless-ruby/releases[releases]
+for the latest available versions. 
+ +[discrete] +[[ruby-client-getting-started-from-the-source-code]] +=== From the source code + +You can install the Ruby client from the client's https://github.com/elastic/elasticsearch-serverless-ruby[source +code] with the +following commands: + +[source,bash] +---- +# From the project's root directory: +gem build elasticsearch-serverless.gemspec +gem install elasticsearch-serverless-x.x.x.gem +---- + +[discrete] +[[ruby-client-getting-started-using-the-gemfile]] +=== Using the Gemfile + +Alternatively, you can include the client gem in your Ruby project's Gemfile: + +[source,ruby] +---- +gem 'elasticsearch-serverless' +---- + +Once installed, require it in your code: + +[source,ruby] +---- +require 'elasticsearch-serverless' +---- + +[discrete] +[[ruby-client-getting-started-running-a-ruby-console]] +=== Running a Ruby console + +You can also run the client from a Ruby console using the client's https://github.com/elastic/elasticsearch-serverless-ruby[source +code]. To start the +console, run the following commands: + +[source,bash] +---- +# From the project's root directory: +bundle install +bundle exec rake console +---- + +[discrete] +[[ruby-client-getting-started-initialize-the-client]] +== Initialize the client + +Initialize the client using your API key and Elasticsearch Endpoint: + +[source,ruby] +---- +client = ElasticsearchServerless::Client.new( + api_key: 'your_api_key', + url: 'https://...' +) +---- + +To get API keys or the Elasticsearch Endpoint for a project, see <>. + +[discrete] +[[ruby-client-getting-started-using-the-api]] +== Using the API + +After you've initialized the client, you can start ingesting documents. You can use +the `bulk` API for this. This API enables you to index, update, and delete several +documents in one request. + +[NOTE] +==== +The code examples in this section use the Ruby console. To set up the console, <>. 
+==== + +[discrete] +[[ruby-client-getting-started-creating-an-index-and-ingesting-documents]] +=== Creating an index and ingesting documents + +You can call the `bulk` API with a body parameter, an array of hashes that +define the action, and a document. + +The following is an example of indexing some classic books into the `books` +index: + +[source,ruby] +---- +# First, build your data: +\> body = [ + { index: { _index: 'books', data: {name: "Snow Crash", author: "Neal Stephenson", release_date: "1992-06-01", page_count: 470} } }, + { index: { _index: 'books', data: {name: "Revelation Space", author: "Alastair Reynolds", release_date: "2000-03-15", page_count: 585} } }, + { index: { _index: 'books', data: {name: "1984", author: "George Orwell", release_date: "1949-06-08", page_count: 328} } }, + { index: { _index: 'books', data: {name: "Fahrenheit 451", author: "Ray Bradbury", release_date: "1953-10-15", page_count: 227} } }, + { index: { _index: 'books', data: {name: "Brave New World", author: "Aldous Huxley", release_date: "1932-06-01", page_count: 268} } }, + { index: { _index: 'books', data: {name: "The Handmaid's Tale", author: "Margaret Atwood", release_date: "1985-06-01", page_count: 311} } } +] +# Then ingest the data via the bulk API: +\> response = client.bulk(body: body) +# You can check the response if the items are indexed and have a document (doc) ID: +\> response['items'] +# Returns: +# => +# [{"index"=>{"_index"=>"books", "_id"=>"Pdink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, "_seq_no"=>0, "_primary_term"=>1, "status"=>201}}, +# {"index"=>{"_index"=>"books", "_id"=>"Ptink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, "_seq_no"=>1, "_primary_term"=>1, "status"=>201}}, +# {"index"=>{"_index"=>"books", "_id"=>"P9ink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, 
"_seq_no"=>2, "_primary_term"=>1, "status"=>201}}, +# {"index"=>{"_index"=>"books", "_id"=>"QNink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, "_seq_no"=>3, "_primary_term"=>1, "status"=>201}}, +# {"index"=>{"_index"=>"books", "_id"=>"Qdink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, "_seq_no"=>4, "_primary_term"=>1, "status"=>201}}, +# {"index"=>{"_index"=>"books", "_id"=>"Qtink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, "_seq_no"=>5, "_primary_term"=>1, "status"=>201}}] +---- + +When you use the client to make a request to Elasticsearch, it returns an API +response object. You can check the HTTP return code by calling `status` and the +HTTP headers by calling `headers` on the response object. The response object +also behaves as a Hash, so you can access the body values directly as seen on +the previous example with `response['items']`. 
+ +[discrete] +[[ruby-client-getting-started-getting-documents]] +=== Getting documents + +You can get documents by using the following code: + +[source,ruby] +---- +\> client.get(index: 'books', id: 'id') # Replace 'id' with a valid doc ID +---- + +[discrete] +[[ruby-client-getting-started-searching]] +=== Searching + +Now that some data is available, you can search your documents using the +`search` API: + +[source,ruby] +---- +\> response = client.search(index: 'books', q: 'snow') +\> response['hits']['hits'] +# Returns: +# => [{"_index"=>"books", "_id"=>"Pdink4cBmDx329iqhzM2", "_score"=>1.5904956, "_source"=>{"name"=>"Snow Crash", "author"=>"Neal Stephenson", "release_date"=>"1992-06-01", "page_count"=>470}}] +---- + +[discrete] +[[ruby-client-getting-started-updating-a-document]] +=== Updating a document + +You can call the `update` API to update a document: + +[source,ruby] +---- +\> response = client.update( + index: 'books', + id: 'id', # Replace 'id' with a valid doc ID + body: { doc: { page_count: 312 } } +) +---- + +[discrete] +[[ruby-client-getting-started-deleting-a-document]] +=== Deleting a document + +You can call the `delete` API to delete a document: + +[source,ruby] +---- +\> client.delete(index: 'books', id: 'id') # Replace 'id' with a valid doc ID +---- + +[discrete] +[[ruby-client-getting-started-deleting-an-index]] +=== Deleting an index + +[source,ruby] +---- +\> client.indices.delete(index: 'books') +---- diff --git a/serverless/pages/clients-ruby-getting-started.mdx b/serverless/pages/elasticsearch/clients-ruby-getting-started.mdx similarity index 100% rename from serverless/pages/clients-ruby-getting-started.mdx rename to serverless/pages/elasticsearch/clients-ruby-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients.asciidoc b/serverless/pages/elasticsearch/clients.asciidoc new file mode 100644 index 0000000000..67f802eade --- /dev/null +++ b/serverless/pages/elasticsearch/clients.asciidoc @@ -0,0 +1,18 @@ +[[clients]] 
+= Client libraries + +:description: Index, search, and manage {es} data in your preferred language. +:keywords: serverless, elasticsearch, clients, overview + +preview:[] + +{es3} provides official language clients to use {es} REST APIs. +Currently, the following language clients are supported: + +* <> | https://github.com/elastic/elasticsearch-serverless-go[Repository] +* <> | https://github.com/elastic/elasticsearch-java/tree/main/java-client-serverless[Repository] +* <> | https://github.com/elastic/elasticsearch-net[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-js[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-php[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-python[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-ruby[Repository] diff --git a/serverless/pages/clients.mdx b/serverless/pages/elasticsearch/clients.mdx similarity index 100% rename from serverless/pages/clients.mdx rename to serverless/pages/elasticsearch/clients.mdx diff --git a/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc b/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc new file mode 100644 index 0000000000..2ca9c62f7b --- /dev/null +++ b/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc @@ -0,0 +1,21 @@ +[[dev-tools]] += Developer tools + +:description: Elastic tools for developers. +:keywords: serverless, elasticsearch, overview + +preview:[] + +[discrete] +[[dev-tools-developer-tools]] +== Developer tools + +A number of developer tools are available in your project's UI under the **Dev Tools** section. + +* https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[Console]: Make API calls to your Elasticsearch instance using the Query DSL and view the responses. 
+* https://www.elastic.co/docs/current/serverless/devtools/profile-queries-and-aggregations[Search Profiler]: Inspect and analyze your search queries to identify performance bottlenecks. +* https://www.elastic.co/docs/current/serverless/devtools/debug-grok-expressions[Grok Debugger]: Build and debug grok patterns before you use them in your data processing pipelines. + +// ## Troubleshooting + +// - : Debug your searches using various Elasticsearch APIs. diff --git a/serverless/pages/elasticsearch-developer-tools.mdx b/serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx similarity index 100% rename from serverless/pages/elasticsearch-developer-tools.mdx rename to serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc new file mode 100644 index 0000000000..998a950c53 --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc @@ -0,0 +1,159 @@ +[[explore-your-data-alerting]] += Manage alerting rules + +:description: Define when to generate alerts and notifications with alerting rules. +:keywords: serverless, elasticsearch, alerting, how-to + +++++ +Alerts +++++ + +preview:[] + +In **{alerts-app}** or **{project-settings} → {manage-app} → {rules-app}** you can: + +* Create and edit rules +* Manage rules including enabling/disabling, muting/unmuting, and deleting +* Drill down to rule details +* Configure rule settings + +[role="screenshot"] +image::images/rules-ui.png[Example rule listing in {rules-ui}] + +For an overview of alerting concepts, go to https://www.elastic.co/docs/current/serverless/rules[]. + +//// +/* ## Required permissions + +Access to rules is granted based on your {alert-features} privileges. */ +//// + +//// +/* MISSING LINK: +For more information, go to missing linkSecuritys. 
*/
+////
+
+[discrete]
+[[explore-your-data-alerting-create-and-edit-rules]]
+== Create and edit rules
+
+When you click the **Create rule** button, it launches a flyout that guides you through selecting a rule type and configuring its conditions and actions.
+
+[role="screenshot"]
+image::images/alerting-overview.png[{rules-ui} app]
+
+The rule types available in an {es} project are:
+
+* {kibana-ref}/rule-type-es-query.html[{es} query]
+* {kibana-ref}/rule-type-index-threshold.html[Index threshold]
+* {kibana-ref}/geo-alerting.html[Tracking containment]
+* {ref}/transform-alerts.html[Transform health]
+
+After a rule is created, you can open the action menu (…) and select **Edit rule** to re-open the flyout and change the rule properties.
+
+You can also manage rules as resources with the https://registry.terraform.io/providers/elastic/elasticstack/latest[Elasticstack provider] for Terraform.
+For more details, refer to the https://registry.terraform.io/providers/elastic/elasticstack/latest/docs/resources/kibana_alerting_rule[elasticstack_kibana_alerting_rule] resource.
+
+// For details on what types of rules are available and how to configure them, refer to [Rule types]{(kibana-ref}/rule-types.html).
+
+// missing link
+
+[discrete]
+[[explore-your-data-alerting-snooze-and-disable-rules]]
+== Snooze and disable rules
+
+The rule listing enables you to quickly snooze, disable, enable, or delete individual rules.
+For example, you can change the state of a rule:
+
+[role="screenshot"]
+image::images/rule-enable-disable.png[Use the rule status dropdown to enable or disable an individual rule]
+
+When you snooze a rule, the rule checks continue to run on a schedule but the alert will not trigger any actions. 
+You can snooze for a specified period of time, indefinitely, or schedule single or recurring downtimes: + +[role="screenshot"] +image::images/rule-snooze-panel.png[Snooze notifications for a rule] + +When a rule is in a snoozed state, you can cancel or change the duration of this state. + +[discrete] +[[explore-your-data-alerting-import-and-export-rules]] +== Import and export rules + +To import and export rules, use https://www.elastic.co/docs/current/serverless/saved-objects[saved objects]. + +//// +/* +TBD: Do stack monitoring rules exist in serverless? +Stack monitoring rules are automatically created for you and therefore cannot be managed in **Saved Objects**. +*/ +//// + +Rules are disabled on export. You are prompted to re-enable the rule on successful import. + +[role="screenshot"] +image::images/rules-imported-banner.png[Rules import banner] + +[discrete] +[[explore-your-data-alerting-view-rule-details]] +== View rule details + +You can determine the health of a rule by looking at its **Last response**. +A rule can have one of the following responses: + +`failed`:: +The rule ran with errors. + +`succeeded`:: +The rule ran without errors. + +`warning`:: +The rule ran with some non-critical errors. + +Click the rule name to access a rule details page: + +[role="screenshot"] +image::images/rule-details-alerts-active.png[Rule details page with multiple alerts] + +In this example, the rule detects when a site serves more than a threshold number of bytes in a 24 hour period. Four sites are above the threshold. These are called alerts - occurrences of the condition being detected - and the alert name, status, time of detection, and duration of the condition are shown in this view. Alerts come and go from the list depending on whether the rule conditions are met. + +When an alert is created, it generates actions. If the conditions that caused the alert persist, the actions run again according to the rule notification settings. 
There are three common alert statuses: + +`active`:: +The conditions for the rule are met and actions should be generated according to the notification settings. + +`flapping`:: +The alert is switching repeatedly between active and recovered states. + +`recovered`:: +The conditions for the rule are no longer met and recovery actions should be generated. + +.Flapping alerts +[NOTE] +==== +The `flapping` state is possible only if you have enabled alert flapping detection in **{rules-ui}** → **Settings**. A look back window and threshold are used to determine whether alerts are flapping. For example, you can specify that the alert must change status at least 6 times in the last 10 runs. If the rule has actions that run when the alert status changes, those actions are suppressed while the alert is flapping. +==== + +If there are rule actions that failed to run successfully, you can see the details on the **History** tab. +In the **Message** column, click the warning or expand icon or click the number in the **Errored actions** column to open the **Errored Actions** panel. + +// + +//// +/* +TBD: Is this setting still feasible in serverless? +In this example, the action failed because the `xpack.actions.email.domain_allowlist` setting was updated and the action's email recipient is no longer included in the allowlist: + +![Rule history page with alerts that have errored actions](../images/rule-details-errored-actions.png) +*/ +//// + +// If an alert was affected by a maintenance window, its identifier appears in the **Maintenance windows** column. + +You can suppress future actions for a specific alert by turning on the **Mute** toggle. +If a muted alert no longer meets the rule conditions, it stays in the list to avoid generating actions if the conditions recur. +You can also disable a rule, which stops it from running checks and clears any alerts it was tracking. +You may want to disable rules that are not currently needed to reduce the load on your cluster. 
+ +[role="screenshot"] +image::images/rule-details-disabling.png[Use the disable toggle to turn off rule checks and clear alerts tracked] diff --git a/serverless/pages/explore-your-data-alerting.mdx b/serverless/pages/elasticsearch/explore-your-data-alerting.mdx similarity index 100% rename from serverless/pages/explore-your-data-alerting.mdx rename to serverless/pages/elasticsearch/explore-your-data-alerting.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc new file mode 100644 index 0000000000..4d48a9c7ff --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc @@ -0,0 +1,203 @@ +[[explore-your-data-discover-your-data]] += Discover your data + +:description: Learn how to use Discover to gain insights into your data. +:keywords: serverless, elasticsearch, discover data, how to + +preview:[] + +With **Discover**, you can quickly search and filter your data, get information +about the structure of the fields, and display your findings in a visualization. +You can also customize and save your searches and place them on a dashboard. + +[discrete] +[[explore-your-data-discover-your-data-explore-and-query-your-data]] +== Explore and query your data + +This tutorial shows you how to use **Discover** to search large amounts of +data and understand what’s going on at any given time. This tutorial uses the book sample data set from the <>. + +You’ll learn to: + +* **Select** data for your exploration, set a time range for that data, +search it with the {kib} Query Language, and filter the results. +* **Explore** the details of your data, view individual documents, and create tables +that summarize the contents of the data. +* **Present** your findings in a visualization. + +At the end of this tutorial, you’ll be ready to start exploring with your own +data in **Discover**. 
+
+[discrete]
+[[explore-your-data-discover-your-data-find-your-data]]
+== Find your data
+
+Tell {kib} where to find the data you want to explore, and then specify the time range in which to view that data.
+
+. Once the book sample data has been ingested, navigate to **Explore → Discover** and click **Create data view**.
+. Give your data view a name.
+
+[role="screenshot"]
+image::images/create-data-view.png[Create a data view]
+
+. Start typing in the **Index pattern** field, and the names of indices, data streams, and aliases that match your input will be displayed.
+
+* To match multiple sources, use a wildcard (*), for example, `b*` and any indices starting with the letter `b` display.
+* To match multiple sources, enter their names separated by a comma. Do not include a space after the comma. For example `books,magazines` would match two indices: `books` and `magazines`.
+* To exclude a source, use a minus sign (-), for example `-books`.
+
+. Open the **Timestamp** field dropdown, and then select `release_date`.
+
+* If you don't set a time field, you can't use global time filters on your dashboards. Leaving the time field unset might be useful if you have multiple time fields and want to create dashboards that combine visualizations based on different timestamps.
+* If your index doesn't have time-based data, choose **I don't want to use the time filter**.
+
+. Click **Show advanced settings** to:
+
+* Display hidden and system indices.
+* Specify your own data view name. For example, enter your Elasticsearch index alias name.
+
+. Click **Save data view to {kib}**.
+. Adjust the time range to view data for the **Last 40 years** to view all your book data.
+
+[role="screenshot"]
+image::images/book-data.png[Your book data displayed]
+
+[discrete]
+[[explore-fields-in-your-data]]
+== Explore the fields in your data
+
+**Discover** includes a table that shows all the documents that match your search. 
By default, the document table includes a column for the time field and a column that lists all other fields in the document. You’ll modify the document table to display your fields of interest. + +. In the sidebar, enter `au` in the search field to find the `author` field. +. In the **Available fields** list, click `author` to view its most popular values. + +**Discover** shows the top 10 values and the number of records used to calculate those values. + +. Click image:images/icons/plusInCircleFilled.svg[Add] to toggle the field into the document table. You can also drag the field from the **Available fields** list into the document table. + +[discrete] +[[explore-your-data-discover-your-data-add-a-field-to-your-data-source]] +== Add a field to your {data-source} + +What happens if you forgot to define an important value as a separate field? Or, what if you +want to combine two fields and treat them as one? This is where {ref}/runtime.html[runtime fields] come into play. +You can add a runtime field to your {data-source} from inside of **Discover**, +and then use that field for analysis and visualizations, +the same way you do with other fields. + +. In the sidebar, click **Add a field**. +. In the **Create field** form, enter `hello` for the name. +. Turn on **Set value**. +. Define the script using the Painless scripting language. Runtime fields require an `emit()`. ++ +[source,ts] +---- +emit("Hello World!"); +---- +. Click **Save**. +. In the sidebar, search for the **hello** field, and then add it to the document table. +. Create a second field named `authorabbrev` that combines the authors last name and first initial. ++ +[source,ts] +---- +String str = doc['author.keyword'].value; +char ch1 = str.charAt(0); +emit(doc['author.keyword'].value + ", " + ch1); +---- +. Add `authorabbrev` to the document table. 
+ +[role="screenshot"] +image::images/add-fields.png[How the fields you just created should display] + +[discrete] +[[search-in-discover]] +== Search your data + +One of the unique capabilities of **Discover** is the ability to combine free text search with filtering based on structured data. To search all fields, enter a simple string in the query bar. + +To search particular fields and build more complex queries, use the {kib} Query language. As you type, KQL prompts you with the fields you can search and the operators you can use to build a structured query. + +Search the book data to find out which books have more than 500 pages: + +. Enter `p`, and then select **page_count**. +. Select **>** for greater than and enter **500**, then click the refresh button or press the Enter key to see which books have more than 500 pages. + +[discrete] +[[filter-in-discover]] +== Filter your data + +Whereas the query defines the set of documents you are interested in, +filters enable you to zero in on subsets of those documents. +You can filter results to include or exclude specific fields, filter for a value in a range, +and more. + +Exclude documents where the author is not Terry Pratchett: + +. Click image:images/icons/plusInCircleFilled.svg[Add] next to the query bar. +. In the **Add filter** pop-up, set the field to **author**, the operator to **is not**, and the value to **Terry Pratchett**. +. Click **Add filter**. +. Continue your exploration by adding more filters. +. To remove a filter, click the close icon (x) next to its name in the filter bar. + +[discrete] +[[look-inside-a-document]] +== Look inside a document + +Dive into an individual document to view its fields and the documents that occurred before and after it. + +. In the document table, click the expand icon image:images/icons/expand.svg[View details] to show document details. +. Scan through the fields and their values. 
If you find a field of interest, hover your mouse over the **Actions** column for filters and other options. +. To create a view of the document that you can bookmark and share, click **Single document**. +. To view documents that occurred before or after the event you are looking at, click **Surrounding documents**. + +[discrete] +[[save-your-search]] +== Save your search for later use + +Save your search so you can use it later to generate a CSV report, create visualizations and Dashboards. Saving a search saves the query text, filters, and current view of **Discover**, including the columns selected in the document table, the sort order, and the {data-source}. + +. In the upper right toolbar, click **Save**. +. Give your search a title. +. Optionally store tags and the time range with the search. +. Click **Save**. + +[discrete] +[[explore-your-data-discover-your-data-visualize-your-findings]] +== Visualize your findings + +If a field can be {ref}/search-aggregations.html[aggregated], you can quickly visualize it from **Discover**. + +. In the sidebar, find and then click `release_date`. +. In the popup, click **Visualize**. + +[NOTE] +==== +{kib} creates a visualization best suited for this field. +==== + +. From the **Available fields** list, drag and drop `page_count` onto the workspace. +. Save your visualization for use on a dashboard. + +For geographical point fields, if you click **Visualize**, your data appears in a map. + +[discrete] +[[share-your-findings]] +== Share your findings + +To share your findings with a larger audience, click **Share** in the upper right toolbar. + +[discrete] +[[alert-from-Discover]] +== Generate alerts + +From **Discover**, you can create a rule to periodically check when data goes above or below a certain threshold within a given time interval. + +. Ensure that your data view, +query, and filters fetch the data for which you want an alert. +. In the toolbar, click **Alerts → Create search threshold rule**. 
++ +The **Create rule** form is pre-filled with the latest query sent to {es}. +. Configure your {es} query and select a connector type. +. Click **Save**. + +For more about this and other rules provided in {alert-features}, go to <>. diff --git a/serverless/pages/explore-your-data-discover-your-data.mdx b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx similarity index 100% rename from serverless/pages/explore-your-data-discover-your-data.mdx rename to serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc b/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc new file mode 100644 index 0000000000..e78c44bfcc --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc @@ -0,0 +1,439 @@ +[[explore-your-data-aggregations]] += Aggregations + +:description: Aggregate and summarize your {es} data. +:keywords: serverless, elasticsearch, aggregations, reference + +preview:[] + +An aggregation summarizes your data as metrics, statistics, or other analytics. +Aggregations help you answer questions like: + +* What's the average load time for my website? +* Who are my most valuable customers based on transaction volume? +* What would be considered a large file on my network? +* How many products are in each product category? + +{es} organizes aggregations into three categories: + +* {ref}/search-aggregations-metrics.html[Metric] aggregations that calculate metrics, +such as a sum or an average, from field values. Note that +{ref}/search-aggregations-metrics-scripted-metric-aggregation.html[scripted metric aggregations] +are not available in serverless {es}. +* {ref}/search-aggregations-bucket.html[Bucket] aggregations that +group documents into buckets, also called bins, based on field values, ranges, +or other criteria. 
+* {ref}/search-aggregations-pipeline.html[Pipeline] aggregations that take input from +other aggregations instead of documents or fields. + +[discrete] +[[explore-your-data-aggregations-run-an-aggregation]] +== Run an aggregation + +You can run aggregations as part of a search by specifying the search API's `aggs` parameter. The +following search runs a {ref}/search-aggregations-bucket-terms-aggregation.html[terms aggregation] on +`my-field`: + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "aggs": { + "my-agg-name": { + "terms": { + "field": "my-field" + } + } + } +} +' +---- + +// TEST[setup:my_index] + +// TEST[s/my-field/http.request.method/] + +Aggregation results are in the response's `aggregations` object: + +// TESTRESPONSE[s/"took": 78/"took": "$body.took"/] + +// TESTRESPONSE[s/\.\.\.$/"took": "$body.took", "timed_out": false, "_shards": "$body._shards", /] + +// TESTRESPONSE[s/"hits": \[\.\.\.\]/"hits": "$body.hits.hits"/] + +// TESTRESPONSE[s/"buckets": \[\]/"buckets":\[\{"key":"get","doc_count":5\}\]/] + +[source,json] +---- +{ + "took": 78, + "timed_out": false, + "_shards": {...}, + "hits": {...}, + "aggregations": { + "my-agg-name": { <1> + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0, + "buckets": [...] + } + } +} +---- + +<1> Results for the `my-agg-name` aggregation. 
+ +[discrete] +[[explore-your-data-aggregations-change-an-aggregations-scope]] +== Change an aggregation's scope + +Use the `query` parameter to limit the documents on which an aggregation runs: + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "query": { + "range": { + "@timestamp": { + "gte": "now-1d/d", + "lt": "now/d" + } + } + }, + "aggs": { + "my-agg-name": { + "terms": { + "field": "my-field" + } + } + } +} +' +---- + +// TEST[setup:my_index] + +// TEST[s/my-field/http.request.method/] + +[discrete] +[[explore-your-data-aggregations-return-only-aggregation-results]] +== Return only aggregation results + +By default, searches containing an aggregation return both search hits and +aggregation results. To return only aggregation results, set `size` to `0`: + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "size": 0, + "aggs": { + "my-agg-name": { + "terms": { + "field": "my-field" + } + } + } +} +' +---- + +// TEST[setup:my_index] + +// TEST[s/my-field/http.request.method/] + +[discrete] +[[explore-your-data-aggregations-run-multiple-aggregations]] +== Run multiple aggregations + +You can specify multiple aggregations in the same request: + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "aggs": { + "my-first-agg-name": { + "terms": { + "field": "my-field" + } + }, + "my-second-agg-name": { + "avg": { + "field": "my-other-field" + } + } + } +} +' +---- + +// TEST[setup:my_index] + +// TEST[s/my-field/http.request.method/] + +// TEST[s/my-other-field/http.response.bytes/] + +[discrete] +[[explore-your-data-aggregations-run-sub-aggregations]] +== Run sub-aggregations + +Bucket aggregations support bucket or metric sub-aggregations. 
For example, a +terms aggregation with an {ref}/search-aggregations-metrics-avg-aggregation.html[avg] +sub-aggregation calculates an average value for each bucket of documents. There +is no level or depth limit for nesting sub-aggregations. + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "aggs": { + "my-agg-name": { + "terms": { + "field": "my-field" + }, + "aggs": { + "my-sub-agg-name": { + "avg": { + "field": "my-other-field" + } + } + } + } + } +} +' +---- + +// TEST[setup:my_index] + +// TEST[s/_search/_search?size=0/] + +// TEST[s/my-field/http.request.method/] + +// TEST[s/my-other-field/http.response.bytes/] + +The response nests sub-aggregation results under their parent aggregation: + +// TESTRESPONSE[s/\.\.\./"took": "$body.took", "timed_out": false, "_shards": "$body._shards", "hits": "$body.hits",/] + +// TESTRESPONSE[s/"key": "foo"/"key": "get"/] + +// TESTRESPONSE[s/"value": 75.0/"value": $body.aggregations.my-agg-name.buckets.0.my-sub-agg-name.value/] + +[source,json] +---- +{ + ... + "aggregations": { + "my-agg-name": { <1> + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0, + "buckets": [ + { + "key": "foo", + "doc_count": 5, + "my-sub-agg-name": { <2> + "value": 75.0 + } + } + ] + } + } +} +---- + +<1> Results for the parent aggregation, `my-agg-name`. + +<2> Results for `my-agg-name`'s sub-aggregation, `my-sub-agg-name`. 
+ +[discrete] +[[explore-your-data-aggregations-add-custom-metadata]] +== Add custom metadata + +Use the `meta` object to associate custom metadata with an aggregation: + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "aggs": { + "my-agg-name": { + "terms": { + "field": "my-field" + }, + "meta": { + "my-metadata-field": "foo" + } + } + } +} +' +---- + +// TEST[setup:my_index] + +// TEST[s/_search/_search?size=0/] + +The response returns the `meta` object in place: + +[source,json] +---- +{ + ... + "aggregations": { + "my-agg-name": { + "meta": { + "my-metadata-field": "foo" + }, + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 0, + "buckets": [] + } + } +} +---- + +// TESTRESPONSE[s/\.\.\./"took": "$body.took", "timed_out": false, "_shards": "$body._shards", "hits": "$body.hits",/] + +[discrete] +[[explore-your-data-aggregations-return-the-aggregation-type]] +== Return the aggregation type + +By default, aggregation results include the aggregation's name but not its type. +To return the aggregation type, use the `typed_keys` query parameter. + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?typed_keys&pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "aggs": { + "my-agg-name": { + "histogram": { + "field": "my-field", + "interval": 1000 + } + } + } +} +' + +---- + +// TEST[setup:my_index] + +// TEST[s/typed_keys/typed_keys&size=0/] + +// TEST[s/my-field/http.response.bytes/] + +The response returns the aggregation type as a prefix to the aggregation's name. + +[IMPORTANT] +==== +Some aggregations return a different aggregation type from the +type in the request. 
For example, the terms, {ref}/search-aggregations-bucket-significantterms-aggregation.html[significant terms], +and {ref}/search-aggregations-metrics-percentile-aggregation.html[percentiles] +aggregations return different aggregation types depending on the data type of +the aggregated field. +==== + +// TESTRESPONSE[s/\.\.\./"took": "$body.took", "timed_out": false, "_shards": "$body._shards", "hits": "$body.hits",/] + +// TESTRESPONSE[s/"buckets": \[\]/"buckets":\[\{"key":1070000.0,"doc_count":5\}\]/] + +[source,json] +---- +{ + ... + "aggregations": { + "histogram#my-agg-name": { <1> + "buckets": [] + } + } +} +---- + +<1> The aggregation type, `histogram`, followed by a `#` separator and the aggregation's name, `my-agg-name`. + +[discrete] +[[explore-your-data-aggregations-use-scripts-in-an-aggregation]] +== Use scripts in an aggregation + +When a field doesn't exactly match the aggregation you need, you +should aggregate on a {ref}/runtime.html[runtime field]: + +[source,bash] +---- +curl "${ES_URL}/my-index/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "size": 0, + "runtime_mappings": { + "message.length": { + "type": "long", + "script": "emit(doc[\u0027message.keyword\u0027].value.length())" + } + }, + "aggs": { + "message_length": { + "histogram": { + "interval": 10, + "field": "message.length" + } + } + } +} +' +---- + +Scripts calculate field values dynamically, which adds a little +overhead to the aggregation. In addition to the time spent calculating, +some aggregations like {ref}/search-aggregations-bucket-terms-aggregation.html[`terms`] +and {ref}/search-aggregations-bucket-filters-aggregation.html[`filters`] can't use +some of their optimizations with runtime fields. In total, performance costs +for using a runtime field vary from aggregation to aggregation. 
+ +[discrete] +[[explore-your-data-aggregations-aggregation-caches]] +== Aggregation caches + +For faster responses, {es} caches the results of frequently run aggregations in +the {ref}/shard-request-cache.html[shard request cache]. To get cached results, use the +same {ref}/search-shard-routing.html#shard-and-node-preference[`preference` string] for each search. If you +don't need search hits, <> to avoid +filling the cache. + +{es} routes searches with the same preference string to the same shards. If the +shards' data doesn't change between searches, the shards return cached +aggregation results. + +[discrete] +[[explore-your-data-aggregations-limits-for-long-values]] +== Limits for `long` values + +When running aggregations, {es} uses {ref}/number.html[`double`] values to hold and +represent numeric data. As a result, aggregations on `long` numbers +greater than 2^53 are approximate. diff --git a/serverless/pages/explore-your-data-the-aggregations-api.mdx b/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.mdx similarity index 100% rename from serverless/pages/explore-your-data-the-aggregations-api.mdx rename to serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc new file mode 100644 index 0000000000..7ca035d302 --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc @@ -0,0 +1,95 @@ +[[explore-your-data-dashboards]] += Create dashboards + +:description: Create dashboards to visualize and monitor your {es} data. +:keywords: serverless, elasticsearch, dashboards, how to + +preview:[] + +Learn the most common way to create a dashboard from your own data. 
The tutorial will use sample data from the perspective of an analyst looking at website logs, but this type of dashboard works on any type of data. + +[discrete] +[[open-the-dashboard]] +== Open the dashboard + +Begin with an empty dashboard, or open an existing dashboard. + +. Open the main menu, then click **Dashboard**. +. On the **Dashboards** page, choose one of the following options: + +* To start with an empty dashboard, click **Create dashboard**. ++ +When you create a dashboard, you are automatically in edit mode and can make changes. +* To open an existing dashboard, click the dashboard **Title** you want to open. ++ +When you open an existing dashboard, you are in view mode. To make changes, click **Edit** in the toolbar. + +[discrete] +[[explore-your-data-dashboards-add-data-and-create-a-dashboard]] +== Add data and create a dashboard + +Add the sample web logs data, and create and set up the dashboard. + +. On the **Dashboard** page, click **Add some sample data**. +. Click **Other sample data sets**. +. On the **Sample web logs** card, click **Add data**. + +Create the dashboard where you'll display the visualization panels. + +. Open the main menu, then click **Dashboard**. +. Click **[Logs] Web Traffic**. + +By default some visualization panels have been created for you using the sample data. Go to <> to learn about the different visualizations. + +[role="screenshot"] +image::images/dashboard-example.png[dashboard with default visualizations using sample data] + +[discrete] +[[explore-your-data-dashboards-reset-the-dashboard]] +== Reset the dashboard + +To remove any changes you've made, reset the dashboard to the last saved changes. + +. In the toolbar, click **Reset**. +. Click **Reset dashboard**. + +[discrete] +[[explore-your-data-dashboards-save-dashboards]] +== Save dashboards + +When you've finished making changes to the dashboard, save it. + +. In the toolbar, click **Save**. +. To exit **Edit** mode, click **Switch to view mode**. 
+ +[discrete] +[[explore-your-data-dashboards-add-dashboard-settings]] +== Add dashboard settings + +When creating a new dashboard, you can add the title, tags, design options, and more to the dashboard. + +. In the toolbar, click **Settings**. +. On the **Dashboard settings** flyout, enter the **Title** and an optional **Description**. +. Add any applicable **Tags**. +. Specify the following settings: + +* **Store time with dashboard** — Saves the specified time filter. +* **Use margins between panels** — Adds a margin of space between each panel. +* **Show panel titles** — Displays the titles in the panel headers. +* **Sync color palettes across panels** — Applies the same color palette to all panels on the dashboard. +* **Sync cursor across panels** — When you hover your cursor over a panel, the cursor on all other related dashboard charts automatically appears. +* **Sync tooltips across panels** — When you hover your cursor over a panel, the tooltips on all other related dashboard charts automatically appear. + +. Click **Apply**. + +[discrete] +[[explore-your-data-dashboards-share-dashboards]] +== Share dashboards + +To share the dashboard with a larger audience, click **Share** in the toolbar. For detailed information about the sharing options, refer to {kibana-ref}/reporting-getting-started.html[Reporting]. + +[discrete] +[[explore-your-data-dashboards-export-dashboards]] +== Export dashboards + +To automate {kib}, you can export dashboards as JSON using the {kibana-ref}/saved-objects-api-export.html[Export objects API]. It is important to export dashboards with all necessary references. 
diff --git a/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.mdx b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.mdx similarity index 100% rename from serverless/pages/explore-your-data-visualize-your-data-create-dashboards.mdx rename to serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc new file mode 100644 index 0000000000..1879230f88 --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc @@ -0,0 +1,384 @@ +[[explore-your-data-visualizations]] += Create visualizations + +:description: Create charts, graphs, maps, and more from your {es} data. +:keywords: serverless, elasticsearch, visualize, how to + +preview:[] + +Learn how to create some visualization panels to add to your dashboard. +This tutorial uses the same web logs sample data from <>. + +[discrete] +[[explore-your-data-visualizations-open-the-visualization-editor-and-get-familiar-with-the-data]] +== Open the visualization editor and get familiar with the data + +Once you have loaded the web logs sample data into your dashboard, let's open the visualization editor to ensure the correct fields appear. + +. On the dashboard, click **Create visualization**. +. Make sure the **{kib} Sample Data Logs** {data-source} appears. + +To create the visualizations in this tutorial, you'll use the following fields: + +* **Records** +* **timestamp** +* **bytes** +* **clientip** +* **referer.keyword** + +To see the most frequent values in a field, hover over the field name, then click _i_. 
+ +[discrete] +[[explore-your-data-visualizations-create-your-first-visualization]] +== Create your first visualization + +Pick a field you want to analyze, such as **clientip**. To analyze only the **clientip** field, use the **Metric** visualization to display the field as a number. + +The only number function that you can use with **clientip** is **Unique count**, also referred to as cardinality, which approximates the number of unique values. + +. Open the **Visualization type** dropdown, then select **Metric**. +. From the **Available fields** list, drag **clientip** to the workspace or layer pane. ++ +In the layer pane, **Unique count of clientip** appears because the editor automatically applies the **Unique count** function to the **clientip** field. **Unique count** is the only numeric function that works with IP addresses. +. In the layer pane, click **Unique count of clientip**. ++ +a. In the **Name** field, enter `Unique visitors`. ++ +b. Click **Close**. +. Click **Save and return**. ++ +**[No Title]** appears in the visualization panel header. Since the visualization has its own `Unique visitors` label, you do not need to add a panel title. + +[discrete] +[[explore-your-data-visualizations-view-a-metric-over-time]] +== View a metric over time + +There are two shortcuts you can use to view metrics over time. +When you drag a numeric field to the workspace, the visualization editor adds the default +time field from the {data-source}. When you use the **Date histogram** function, you can +replace the time field by dragging the field to the workspace. + +To visualize the **bytes** field over time: + +. On the dashboard, click **Create visualization**. +. From the **Available fields** list, drag **bytes** to the workspace. ++ +The visualization editor creates a bar chart with the **timestamp** and **Median of bytes** fields. +. To zoom in on the data, click and drag your cursor across the bars. 
+ +To emphasize the change in **Median of bytes** over time, change the visualization type to **Line** with one of the following options: + +* In the **Suggestions**, click the line chart. +* In the editor toolbar, open the **Visualization type** dropdown, then select **Line**. + +To increase the minimum time interval: + +. In the layer pane, click **timestamp**. +. Change the **Minimum interval** to **1d**, then click **Close**. ++ +You can increase and decrease the minimum interval, but you are unable to decrease the interval below the configured **Advanced Settings**. + +To save space on the dashboard, hide the axis labels. + +. Open the **Left axis** menu, then select **None** from the **Axis title** dropdown. +. Open the **Bottom axis** menu, then select **None** from the **Axis title** dropdown. +. Click **Save and return** + +Since you removed the axis labels, add a panel title: + +. Open the panel menu, then select **Panel settings**. +. In the **Title** field, enter `Median of bytes`, then click **Apply**. + +[discrete] +[[explore-your-data-visualizations-view-the-top-values-of-a-field]] +== View the top values of a field + +Create a visualization that displays the most frequent values of **request.keyword** on your website, ranked by the unique visitors. To create the visualization, use **Top values of request.keyword** ranked by **Unique count of clientip**, instead of being ranked by **Count of records**. + +The **Top values** function ranks the unique values of a field by another function. +The values are the most frequent when ranked by a **Count** function, and the largest when ranked by the **Sum** function. + +. On the dashboard, click **Create visualization**. +. From the **Available fields** list, drag **clientip** to the **Vertical axis** field in the layer pane. ++ +The visualization editor automatically applies the **Unique count** function. If you drag **clientip** to the workspace, the editor adds the field to the incorrect axis. +. 
Drag **request.keyword** to the workspace. ++ +When you drag a text or IP address field to the workspace, the editor adds the **Top values** function ranked by **Count of records** to show the most frequent values. + +The chart labels are unable to display because the **request.keyword** field contains long text fields. You could use one of the **Suggestions**, but the suggestions also have issues with long text. The best way to display long text fields is with the **Table** visualization. + +. Open the **Visualization type** dropdown, then select **Table**. +. In the layer pane, click **Top 5 values of request.keyword**. ++ +a. In the **Number of values** field, enter `10`. ++ +b. In the **Name** field, enter `Page URL`. ++ +c. Click **Close**. +. Click **Save and return**. ++ +Since the table columns are labeled, you do not need to add a panel title. + +[discrete] +[[explore-your-data-visualizations-compare-a-subset-of-documents-to-all-documents]] +== Compare a subset of documents to all documents + +Create a proportional visualization that helps you determine if your users transfer more bytes from documents under 10KB versus documents over 10KB. + +. On the dashboard, click **Create visualization**. +. From the **Available fields** list, drag **bytes** to the **Vertical axis** field in the layer pane. +. In the layer pane, click **Median of bytes**. +. Click the **Sum** quick function, then click **Close**. +. From the **Available fields** list, drag **bytes** to the **Break down by** field in the layer pane. + +To select documents based on the number range of a field, use the **Intervals** function. +When the ranges are non numeric, or the query requires multiple clauses, you could use the **Filters** function. + +Specify the file size ranges: + +. In the layer pane, click **bytes**. +. Click **Create custom ranges**, enter the following in the **Ranges** field, then press Return: + +* **Ranges** — `0` -> `10240` +* **Label** — `Below 10KB` + +. 
Click **Add range**, enter the following, then press Return: + +* **Ranges** — `10240` -> `+∞` +* **Label** — `Above 10KB` + +. From the **Value format** dropdown, select **Bytes (1024)**, then click **Close**. + +To display the values as a percentage of the sum of all values, use the **Pie** chart. + +. Open the **Visualization Type** dropdown, then select **Pie**. +. Click **Save and return**. + +Add a panel title: + +. Open the panel menu, then select **Panel settings**. +. In the **Title** field, enter `Sum of bytes from large requests`, then click **Apply**. + +[discrete] +[[explore-your-data-visualizations-view-the-distribution-of-a-number-field]] +== View the distribution of a number field + +The distribution of a number can help you find patterns. For example, you can analyze the website traffic per hour to find the best time for routine maintenance. + +. On the dashboard, click **Create visualization**. +. From the **Available fields** list, drag **bytes** to **Vertical axis** field in the layer pane. +. In the layer pane, click **Median of bytes**. ++ +a. Click the **Sum** quick function. ++ +b. In the **Name** field, enter `Transferred bytes`. ++ +c. From the **Value format** dropdown, select **Bytes (1024)**, then click **Close**. +. From the **Available fields** list, drag **hour_of_day** to **Horizontal axis** field in the layer pane. +. In the layer pane, click **hour_of_day**, then slide the **Intervals granularity** slider until the horizontal axis displays hourly intervals. +. Click **Save and return**. + +Add a panel title: + +. Open the panel menu, then select **Panel settings**. +. In the **Title** field, enter `Website traffic`, then click **Apply**. + +[discrete] +[[explore-your-data-visualizations-create-a-multi-level-chart]] +== Create a multi-level chart + +**Table** and **Proportion** visualizations support multiple functions. 
For example, to create visualizations that break down the data by website traffic sources and user geography, apply the **Filters** and **Top values** functions. + +. On the dashboard, click **Create visualization**. +. Open the **Visualization type** dropdown, then select **Treemap**. +. From the **Available fields** list, drag **Records** to the **Metric** field in the layer pane. +. In the layer pane, click **Add or drag-and-drop a field** for **Group by**. + +Create a filter for each website traffic source: + +. Click **Filters**. +. Click **All records**, enter the following in the query bar, then press Return: + +* **KQL** — `referer : **facebook.com**` +* **Label** — `Facebook` + +. Click **Add a filter**, enter the following in the query bar, then press Return: + +* **KQL** — `referer : **twitter.com**` +* **Label** — `Twitter` + +. Click **Add a filter**, enter the following in the query bar, then press Return: + +* **KQL** — `NOT referer : **twitter.com** OR NOT referer: **facebook.com**` +* **Label** — `Other` + +. Click **Close**. + +Add the user geography grouping: + +. From the **Available fields** list, drag **geo.srcdest** to the workspace. +. To change the **Group by** order, drag **Top 3 values of geo.srcdest** in the layer pane so that appears first. + +Remove the documents that do not match the filter criteria: + +. In the layer pane, click **Top 3 values of geo.srcdest**. +. Click **Advanced**, deselect **Group other values as "Other"**, then click **Close**. +. Click **Save and return**. + +Add a panel title: + +. Open the panel menu, then select **Panel settings**. +. In the **Title** field, enter `Page views by location and referrer`, then click **Apply**. + +[discrete] +[[explore-your-data-visualizations-visualization-panels]] +== Visualization panels + +Visualization panels are how you display visualizations of your data and what make Kibana such a useful tool. Panels are designed to build interactive dashboards. 
+ +[discrete] +[[explore-your-data-visualizations-create-and-add-panels]] +=== Create and add panels + +Create new panels, which can be accessed from the dashboard toolbar or the **Visualize Library**, or add panels that are saved in the **Visualize Library**, or search results from <>. + +Panels added to the **Visualize Library** are available to all dashboards. + +To create panels from the dashboard: + +. From the main menu, click **Dashboard** and select **[Logs] Web Traffic**. +. Click **Edit** then click **Create visualization**. +. From the **Available fields** drag and drop the data you want to visualize. +. Click **Save and return**. +. Click **Save** to add the new panel to your dashboard. + +To create panels from the **Visualize Library**: + +. From the main menu, click **Visualize Library**. +. Click **Create visualization**, then select an editor from the options. +. Click **Save** once you have created your new visualization. +. In the modal, enter a **Title**, **Description**, and decide if you want to save the new panel to an existing dashboard, a new dashboard, or to the **Visualize Library**. +. Save the panel. + +To add existing panels from the **Visualize Library**: + +. From the main menu, click **Dashboard** and select **[Logs] Web Traffic**. +. Click **Edit** then in the dashboard toolbar, click **Add from library**. +. Click the panel you want to add to the dashboard, then click _X_. + +[discrete] +[[explore-your-data-visualizations-save-panels]] +=== Save panels + +Consider where you want to save and add the panel in {kib}. + +[discrete] +[[explore-your-data-visualizations-save-to-the-visualize-library]] +==== Save to the Visualize Library + +To use the panel on other dashboards, save the panel to the **Visualize Library**. When panels are saved in the **Visualize Library**, image:images/icons/folderCheck.svg[Visualize Library] appears in the panel header. + +If you created the panel from the dashboard: + +. 
Open the panel menu and click **More → Save to library**. +. Enter the **Title** and click **Save**. + +If you created the panel from the **Visualize Library**: + +. In the editor, click **Save**. +. Under **Save visualization** enter a **Title**, **Description**, and decide if you want to save the new panel to an existing dashboard, a new dashboard, or to the **Visualize Library**. +. Click **Save and go to Dashboard**. +. Click **Save**. + +[discrete] +[[explore-your-data-visualizations-save-to-the-dashboard]] +==== Save to the dashboard + +Return to the dashboard and add the panel without specifying the save options or adding the panel to the **Visualize Library**. + +If you created the panel from the dashboard: + +. In the editor, click **Save and return**. +. Click **Save**. + +If you created the panel from the **Visualize Library**: + +. Click **Save**. +. Under **Save visualization** enter a **Title**, **Description**, and decide if you want to save the new panel to an existing dashboard, a new dashboard, or to the **Visualize Library**. +. Click **Save and go to Dashboard**. +. Click **Save**. + +To add unsaved panels to the **Visualize Library**: + +. Open the panel menu, then select **More → Save to library**. +. Enter the panel title, then click **Save**. + +[discrete] +[[explore-your-data-visualizations-arrange-panels]] +=== Arrange panels + +Compare the data in your panels side-by-side, organize panels by priority, resize the panels so they all appear on the dashboard without scrolling down, and more. + +In the toolbar, click **Edit**, then use the following options: + +* To move, click and hold the panel header, then drag to the new location. +* To resize, click the resize control, then drag to the new dimensions. +* To maximize to fullscreen, open the panel menu, then click **More → Maximize panel**. 
+ +[discrete] +[[explore-your-data-visualizations-add-text-panels]] +=== Add text panels + +Add **Text** panels to your dashboard that display important information, instructions, and more. You create **Text** panels using https://github.github.com/gfm/[GitHub-flavored Markdown] text. + +. On the dashboard, click **Edit**. +. Click **Add panel** and select **image:images/icons/visText.svg[Create new text] Text**. +. Check the rendered text, then click **Save and return**. +. To save the new text panel to your dashboard, click **Save**. + +[discrete] +[[explore-your-data-visualizations-add-image-panels]] +=== Add image panels + +To personalize your dashboards, add your own logos and graphics with the **Image** panel. You can upload images from your computer, or add images from an external link. + +. On the dashboard, click **Edit**. +. Click **Add panel** and select **image:images/icons/image.svg[Add image] Image**. +. Use the editor to add an image. +. Click **Save**. +. To save the new image panel to your dashboard, click **Save**. + +To manage your uploaded image files, open the main menu, then click **Management → Files**. + +[WARNING] +==== +When you export a dashboard, the uploaded image files are not exported. +When importing a dashboard with an image panel, and the image file is unavailable, the image panel displays a `not found` warning. Such panels have to be fixed manually by re-uploading the image using the panel's image editor. +==== + +[discrete] +[[edit-panels]] +=== Edit panels + +To make changes to the panel, use the panel menu options. + +. In the toolbar, click **Edit**. +. Open the panel menu, then use the following options: + +* **Edit Lens** — Opens **Lens** so you can make changes to the visualization. +* **Edit visualization** — Opens the editor so you can make changes to the panel. +* **Edit map** — Opens the editor so you can make changes to the map panel. 
++ +The above options are displayed according to the type of visualization the panel contains. +* **Edit Lens** — Opens aggregation-based visualizations in **Lens**. +* **Clone panel** — Opens a copy of the panel on your dashboard. +* **Panel settings** — Opens the **Panel settings** window to change the **title**, **description**, and **time range**. +* **More → Inspect** — Opens an editor so you can view the data and the requests that collect that data. +* **More → Explore data in Discover** — Opens that panel's data in **Discover**. +* **More → Save to library** — Saves the panel to the **Visualize Library**. +* **More → Maximize panel** — Maximizes the panel to full screen. +* **More → Download as CSV** — Downloads the data as a CSV file. +* **More → Replace panel** — Opens the **Visualize Library** so you can select a new panel to replace the existing panel. +* **More → Copy to dashboard** — Copies the panel to a different dashboard. +* **More → Delete from dashboard** — Removes the panel from the dashboard. diff --git a/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.mdx b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx similarity index 100% rename from serverless/pages/explore-your-data-visualize-your-data-create-visualizations.mdx rename to serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc new file mode 100644 index 0000000000..67548ab641 --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc @@ -0,0 +1,38 @@ +[[explore-your-data-visualize-your-data]] += Visualize your data + +:description: Build dynamic dashboards and visualizations for your {es} data. 
+:keywords: serverless, elasticsearch, visualize, how to + +preview:[] + +The best way to understand your data is to visualize it. + +Elastic provides a wide range of pre-built dashboards for visualizing data from a variety of sources. +These dashboards are loaded automatically when you install https://www.elastic.co/docs/current/integrations[Elastic integrations]. + +You can also create new dashboards and visualizations based on your data views to get a full picture of your data. + +In your {es} project, go to **Dashboards** to see existing dashboards or create your own. + +Notice you can filter the list of dashboards: + +* Use the text search field to filter by name or description. +* Use the **Tags** menu to filter by tag. To create a new tag or edit existing tags, click **Manage tags**. +* Click a dashboard's tags to toggle filtering for each tag. + +[discrete] +[[explore-your-data-visualize-your-data-create-new-dashboards]] +== Create new dashboards + +To create a new dashboard, click **Create dashboard** and begin adding visualizations. +You can create charts, graphs, maps, tables, and other types of visualizations from your data, or you can add visualizations from the library. + +You can also add other types of panels — such as filters, links, and text — and add controls like time sliders. + +For more information about creating dashboards, refer to the {kibana-ref}/dashboard.html[{kib} documentation]. + +[NOTE] +==== +The {kib} documentation is written for {kib} users, but the steps for serverless are very similar. 
+==== diff --git a/serverless/pages/explore-your-data-visualize-your-data.mdx b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.mdx similarity index 100% rename from serverless/pages/explore-your-data-visualize-your-data.mdx rename to serverless/pages/elasticsearch/explore-your-data-visualize-your-data.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data.asciidoc new file mode 100644 index 0000000000..5c869f2fd3 --- /dev/null +++ b/serverless/pages/elasticsearch/explore-your-data.asciidoc @@ -0,0 +1,14 @@ +[[explore-your-data]] += Explore your data + +:description: Turn {es} data into actionable insights with aggregations, visualizations, and alerts +:keywords: serverless, elasticsearch, explore, overview + +preview:[] + +In addition to search, {es3} offers several options for analyzing and visualizing your data. + +* <>: Use the {es} REST API to summarize your data as metrics, statistics, or other analytics. +* <>: Use the **Discover** UI to filter your data or learn about its structure. +* <>: Build dynamic dashboards that visualize your data as charts, gauges, graphs, maps, and more. +* <>: Create rules that trigger notifications based on your data. 
diff --git a/serverless/pages/explore-your-data.mdx b/serverless/pages/elasticsearch/explore-your-data.mdx similarity index 100% rename from serverless/pages/explore-your-data.mdx rename to serverless/pages/elasticsearch/explore-your-data.mdx diff --git a/serverless/pages/elasticsearch/get-started.asciidoc b/serverless/pages/elasticsearch/get-started.asciidoc new file mode 100644 index 0000000000..9e55c8640a --- /dev/null +++ b/serverless/pages/elasticsearch/get-started.asciidoc @@ -0,0 +1,250 @@ +[[get-started]] += Get started + +:description: Get started with {es3} in a few steps +:keywords: serverless, elasticsearch, getstarted, overview + +preview:[] + +Follow along to set up your {es} project and get started with some sample documents. +Then, choose how to continue with your own data. + +[discrete] +[[get-started-create-project]] +== Create project + +Use your {ecloud} account to create a fully-managed {es} project: + +. Navigate to {ess-console}[cloud.elastic.co] and create a new account or log in to your existing account. +. Within **Fully-managed projects**, choose **Create project**. +. Choose the {es} project type. +. Select a **configuration** for your project, based on your use case. ++ +** **General purpose**. For general search use cases across various data types. +** **Optimized for Vectors**. For search use cases using vectors and near real-time retrieval. +. Provide a name for the project and optionally edit the project settings, such as the cloud platform https://www.elastic.co/docs/current/serverless/regions[region]. +Select **Create project** to continue. +. Once the project is ready, select **Continue**. + +You should now see **Get started with {es}**, and you're ready to continue. + +include::../../partials/minimum-vcus-detail.asciidoc[] + +[discrete] +[[get-started-create-api-key]] +== Create API key + +Create an API key, which will enable you to access the {es} API to ingest and search data. + +. 
Scroll to **Add an API Key** and select **New**. +. In **Create an API key**, enter a name for your key and its expiration. +Select **Create API Key** to finish. + +The API key is displayed as a set of values, including `id`, `name`, `expiration`, `api_key`, and `encoded`. +Store this information securely—it is displayed only once. + +You will use the `encoded` value when sending API requests. + +[NOTE] +==== +You can't recover or retrieve a lost API key. Instead, you must delete the key and create a new one. +==== + +[discrete] +[[get-started-copy-url]] +== Copy URL + +Next, copy the URL of your API endpoint. +You'll send all {es} API requests to this URL. + +. Scroll to **Copy your connection details**. +. Find the value for **Elasticsearch Endpoint**. + +Store this value along with your `encoded` API key. +You'll use both values in the next step. + +[discrete] +[[get-started-test-connection]] +== Test connection + +We'll use the `curl` command to test your connection and make additional API requests. +(See https://everything.curl.dev/get[Install curl] if you need to install this program.) + +`curl` will need access to your Elasticsearch Endpoint and `encoded` API key. +Within your terminal, assign these values to the `ES_URL` and `API_KEY` environment variables. + +For example: + +[source,bash] +---- +export ES_URL="https://dda7de7f1d264286a8fc9741c7741690.es.us-east-1.aws.elastic.cloud:443" +export API_KEY="ZFZRbF9Jb0JDMEoxaVhoR2pSa3Q6dExwdmJSaldRTHFXWEp4TFFlR19Hdw==" +---- + +Then run the following command to test your connection: + +[source,bash] +---- +curl "${ES_URL}" \ + -H "Authorization: ApiKey ${API_KEY}" \ + -H "Content-Type: application/json" +---- + +You should receive a response similar to the following: + +[source,json] +---- +{ + "name" : "serverless", + "cluster_name" : "dda7de7f1d264286a8fc9741c7741690", + "cluster_uuid" : "ws0IbTBUQfigmYAVMztkZQ", + "version" : { ... 
}, + "tagline" : "You Know, for Search" +} +---- + +Now you're ready to ingest and search some sample documents. + +[discrete] +[[get-started-ingest-data]] +== Ingest data + +[NOTE] +==== +This example uses {es} APIs to ingest data. If you'd prefer to upload a file using the UI, refer to <>. +==== + +To ingest data, you must create an index and store some documents. +This process is also called "indexing". + +You can index multiple documents using a single `POST` request to the `_bulk` API endpoint. +The request body specifies the documents to store and the indices in which to store them. + +{es} will automatically create the index and map each document value to one of its data types. +Include the `?pretty` option to receive a human-readable response. + +Run the following command to index some sample documents into the `books` index: + +[source,bash] +---- +curl -X POST "${ES_URL}/_bulk?pretty" \ + -H "Authorization: ApiKey ${API_KEY}" \ + -H "Content-Type: application/json" \ + -d ' +{ "index" : { "_index" : "books" } } +{"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} +{ "index" : { "_index" : "books" } } +{"name": "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585} +{ "index" : { "_index" : "books" } } +{"name": "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328} +{ "index" : { "_index" : "books" } } +{"name": "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227} +{ "index" : { "_index" : "books" } } +{"name": "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268} +{ "index" : { "_index" : "books" } } +{"name": "The Handmaids Tale", "author": "Margaret Atwood", "release_date": "1985-06-01", "page_count": 311} +' +---- + +You should receive a response indicating there were no errors: + +[source,json] +---- +{ + "errors" : false, + "took" : 1260, + "items" 
: [ ... ] +} +---- + +[discrete] +[[get-started-search-data]] +== Search data + +To search, send a `POST` request to the `_search` endpoint, specifying the index to search. +Use the {es} query DSL to construct your request body. + +Run the following command to search the `books` index for documents containing `snow`: + +[source,bash] +---- +curl -X POST "${ES_URL}/books/_search?pretty" \ + -H "Authorization: ApiKey ${API_KEY}" \ + -H "Content-Type: application/json" \ + -d ' +{ + "query": { + "query_string": { + "query": "snow" + } + } +} +' +---- + +You should receive a response with the results: + +[source,json] +---- +{ + "took" : 24, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 1, + "relation" : "eq" + }, + "max_score" : 1.5904956, + "hits" : [ + { + "_index" : "books", + "_id" : "Z3hf_IoBONQ5TXnpLdlY", + "_score" : 1.5904956, + "_source" : { + "name" : "Snow Crash", + "author" : "Neal Stephenson", + "release_date" : "1992-06-01", + "page_count" : 470 + } + } + ] + } +} +---- + +[discrete] +[[get-started-continue-on-your-own]] +== Continue on your own + +Congratulations! +You've set up an {es} project, and you've ingested and searched some sample data. +Now you're ready to continue on your own. + +[discrete] +[[get-started-explore]] +=== Explore + +Want to explore the sample documents or your own data? + +By creating a data view, you can explore data using several UI tools, such as Discover or Dashboards. Or, use {es} aggregations to explore your data using the API. Find more information in <>. + +[discrete] +[[get-started-build]] +=== Build + +Ready to build your own solution? + +To learn more about sending and syncing data to {es}, or the search API and its query DSL, check <> and <>. 
+ +//// +/* +- +- +*/ +//// diff --git a/serverless/pages/get-started.mdx b/serverless/pages/elasticsearch/get-started.mdx similarity index 100% rename from serverless/pages/get-started.mdx rename to serverless/pages/elasticsearch/get-started.mdx diff --git a/serverless/pages/elasticsearch/index.asciidoc b/serverless/pages/elasticsearch/index.asciidoc new file mode 100644 index 0000000000..cc4100c405 --- /dev/null +++ b/serverless/pages/elasticsearch/index.asciidoc @@ -0,0 +1,53 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + += Elasticsearch + +include::./what-is-elasticsearch-serverless.asciidoc[leveloffset=+1] + +include::./pricing.asciidoc[leveloffset=+1] + +include::./get-started.asciidoc[leveloffset=+1] + +include::./clients.asciidoc[leveloffset=+1] +include::./clients-go-getting-started.asciidoc[leveloffset=+2] +include::./clients-java-getting-started.asciidoc[leveloffset=+2] +include::./clients-dot-net-getting-started.asciidoc[leveloffset=+2] +include::./clients-nodejs-getting-started.asciidoc[leveloffset=+2] +include::./clients-php-getting-started.asciidoc[leveloffset=+2] +include::./clients-python-getting-started.asciidoc[leveloffset=+2] +include::./clients-ruby-getting-started.asciidoc[leveloffset=+2] + +include::./apis-http-apis.asciidoc[leveloffset=+1] +include::./apis-elasticsearch-conventions.asciidoc[leveloffset=+2] +include::./apis-kibana-conventions.asciidoc[leveloffset=+2] + +include::./elasticsearch-developer-tools.asciidoc[leveloffset=+1] + +include::./ingest-your-data.asciidoc[leveloffset=+1] +include::./ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+2] +include::./ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+2] +include::./ingest-your-data-upload-file.asciidoc[leveloffset=+2] +include::./ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+2] 
+include::./ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+2] + +include::./search-your-data.asciidoc[leveloffset=+1] +include::./search-your-data-the-search-api.asciidoc[leveloffset=+2] +include::./search-with-synonyms.asciidoc[leveloffset=+2] +include::./knn-search.asciidoc[leveloffset=+2] +include::./search-your-data-semantic-search.asciidoc[leveloffset=+2] +include::./search-your-data-semantic-search-elser.asciidoc[leveloffset=+3] + +include::./explore-your-data.asciidoc[leveloffset=+1] +include::./explore-your-data-the-aggregations-api.asciidoc[leveloffset=+2] +include::./explore-your-data-discover-your-data.asciidoc[leveloffset=+2] +include::./explore-your-data-visualize-your-data.asciidoc[leveloffset=+2] +include::./explore-your-data-alerting.asciidoc[leveloffset=+2] + +include::./search-playground.asciidoc[leveloffset=+1] + +include::./serverless-differences.asciidoc[leveloffset=+1] + +include::./technical-preview-limitations.asciidoc[leveloffset=+1] diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc new file mode 100644 index 0000000000..94f891d904 --- /dev/null +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc @@ -0,0 +1,140 @@ +[[ingest-data-through-api]] += Ingest data through API + +:description: Add data to {es} using HTTP APIs or a language client. +:keywords: serverless, elasticsearch, ingest, api, how to + +preview:[] + +The {es} APIs enable you to ingest data through code. +You can use the APIs of one of the +<> or the +{es} HTTP APIs. The examples +on this page use the HTTP APIs to demonstrate how ingesting works in +{es} through APIs. +If you want to ingest timestamped data or have a +more complex ingestion use case, check out +<> or +<>. + +// . 
+ +// ^^^^Page temporarily removed + +[discrete] +[[ingest-data-through-api-using-the-bulk-api]] +== Using the bulk API + +You can index multiple JSON documents to an index and make it searchable using +the bulk API. + +The following example uses the bulk API to ingest book-related data into an +index called `books`. The API call creates the index if it doesn't exist already. + +[source,bash] +---- +curl -X POST "${ES_URL}/_bulk?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ "index" : { "_index" : "books" } } +{"title": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} +{ "index" : { "_index" : "books" } } +{"title": "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585} +{ "index" : { "_index" : "books" } } +{"title": "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328} +{ "index" : { "_index" : "books" } } +{"title": "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227} +{ "index" : { "_index" : "books" } } +{"title": "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268} +{ "index" : { "_index" : "books" } } +{"title": "The Blind Assassin", "author": "Margaret Atwood", "release_date": "2000-09-02", "page_count": 536} +' +---- + +The API returns a response similar to this: + +[source,json] +---- +{ + "errors": false, + "took": 902, + "items": [ + { + "index": { + "_index": "books", + "_id": "MCYbQooByucZ6Gimx2BL", + "_version": 1, + "result": "created", + "_shards": { + "total": 1, + "successful": 1, + "failed": 0 + }, + "_seq_no": 0, + "_primary_term": 1, + "status": 201 + } + }, + ... + ] +} +---- + +[NOTE] +==== +Under the hood, the bulk request creates a data schema, called "mappings" for the `books` index. 
+To review the mappings and ensure the JSON body matches the index mappings, navigate to **Content** → **Index management**, select the index you want to ingest the data into, and click the **Mappings** tab. +==== + +The API call creates an index called `books` and adds six documents to it. All +those documents have the `title`, `author`, `release_date`, and `page_count` +fields with associated values. This data is now searchable. + +You can check if a book is in the index by calling the search API and specifying +either of the properties of the book in a `match` query, for example: + +[source,bash] +---- +curl "${ES_URL}/books/_search?pretty" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "query": { + "match": { + "title": "Snow Crash" + } + } +} +' +---- + +The API response contains an array of hits. Each hit represents a document that +matches the query. The response contains the whole document. Only one document +matches this query. + +[discrete] +[[ingest-data-through-api-using-the-index-api]] +== Using the index API + +Use the index API to ingest a single document to an index. Following the +previous example, a new document will be added to the `books` index. + +[source,bash] +---- +curl -X POST "${ES_URL}/books/_doc/" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "title": "Neuromancer", + "author": "William Gibson", + "release_date": "1984-07-01", + "page_count": "271" +} +' +---- + +The API call indexes the new document into the `books` index. Now you can search +for it! 
diff --git a/serverless/pages/ingest-your-data-ingest-data-through-api.mdx b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.mdx similarity index 100% rename from serverless/pages/ingest-your-data-ingest-data-through-api.mdx rename to serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc new file mode 100644 index 0000000000..8f66b40bdd --- /dev/null +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc @@ -0,0 +1,49 @@ +[[ingest-data-through-beats]] += Beats + +:description: Use {beats} to ship operational data to {es}. +:keywords: serverless, elasticsearch, ingest, beats, how to + +preview:[] + +{beats} are lightweight data shippers that send operational data to {es}. +Elastic provides separate {beats} for different types of data, such as logs, metrics, and uptime. +Depending on what data you want to collect, you may need to install multiple shippers on a single host. + +|=== +| Data| {beats} + +| Audit data +| https://www.elastic.co/products/beats/auditbeat[Auditbeat] + +| Log files and journals +| https://www.elastic.co/products/beats/filebeat[Filebeat] + +| Cloud data +| https://www.elastic.co/products/beats/functionbeat[Functionbeat] + +| Availability +| https://www.elastic.co/products/beats/heartbeat[Heartbeat] + +| Metrics +| https://www.elastic.co/products/beats/metricbeat[Metricbeat] + +| Network traffic +| https://www.elastic.co/products/beats/packetbeat[Packetbeat] + +| Windows event logs +| https://www.elastic.co/products/beats/winlogbeat[Winlogbeat] +|=== + +{beats} can send data to {es} directly or through {ls}, where you +can further process and enhance the data before visualizing it in {kib}. 
+ +.Authenticating with Elasticsearch +[NOTE] +==== +When you use {beats} to export data to an {es} project, the {beats} require an API key to authenticate with {es}. +Refer to <> for the steps to set up your API key, +and to https://www.elastic.co/guide/en/beats/filebeat/current/beats-api-keys.html[Grant access using API keys] in the Filebeat documentation for an example of how to configure your {beats} to use the key. +==== + +Check out {beats-ref}/getting-started.html[Get started with Beats] for some next steps. diff --git a/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.mdx b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.mdx similarity index 100% rename from serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.mdx rename to serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc new file mode 100644 index 0000000000..20b689d7fb --- /dev/null +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc @@ -0,0 +1,303 @@ +[[ingest-data-through-integrations-connector-client]] += Connector clients + +[NOTE] +==== +This page contains high-level instructions about setting up connector clients in your project's UI. +Because prerequisites and configuration details vary by data source, you'll need to refer to the individual connector documentation for specific details. +==== + +A *connector* is a type of https://www.elastic.co/integrations/data-integrations[Elastic integration] that syncs data from an original data source to {es}. +Each connector extracts the original files, records, or objects, and transforms them into documents within {es}. 
+ +*Connector clients* are **self-managed** connectors that you run on your own infrastructure. +These connectors are written in Python and the source code is available in the https://github.com/elastic/connectors/tree/main/connectors/sources[`elastic/connectors`] repo. + +[discrete] +[[ingest-data-through-integrations-connector-client-available-connectors]] +== Available connectors + +Connector clients are available for the following third-party data sources: + +[%collapsible] +.Click to expand +===== +// TODO: Update links if these references move + +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-azure-blob.html[Azure Blob Storage] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-box.html[Box] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-confluence.html[Confluence] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-dropbox.html[Dropbox] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-github.html[GitHub] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-gmail.html[Gmail] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-google-cloud.html[Google Cloud Storage] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-google-drive.html[Google Drive] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-graphql.html[GraphQL] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-jira.html[Jira] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-ms-sql.html[MicrosoftSQL] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-mongodb.html[MongoDB] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-mysql.html[MySQL] +* 
https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-network-drive.html[Network drive] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-notion.html[Notion] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-onedrive.html[OneDrive] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-opentext.html[OpenText Documentum] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-oracle.html[Oracle] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-outlook.html[Outlook] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-postgresql.html[PostgreSQL] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-redis.html[Redis] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-s3.html[S3] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-salesforce.html[Salesforce] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-servicenow.html[ServiceNow] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-sharepoint-online.html[SharePoint Online] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-sharepoint.html[SharePoint Server] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-slack.html[Slack] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-teams.html[Teams] +* https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors-zoom.html[Zoom] +===== + +[discrete] +[[ingest-data-through-integrations-connector-client-overview]] +== Overview + +Because connector clients are self-managed on your own infrastructure, they run outside of your {es} serverless project. + +You can run them from source or in a Docker container. 
+ +.Workflow +[NOTE] +==== +In order to set up, configure, and run a connector you'll be moving between your third-party service, the {es} Serverless UI, and your terminal. +At a high-level, the workflow looks like this: + +. Satisfy any data source prerequisites (e.g., create an OAuth application). +. Create a connector in the UI. +. Deploy the connector service from source or with Docker. +. Enter data source configuration details in the UI. +==== + +[discrete] +[[ingest-data-through-integrations-connector-client-data-source-prerequisites]] +=== Data source prerequisites + +The first decision you need to make before deploying a connector is which third party service (data source) you want to sync to {es}. +See the list of <>. + +Note that each data source will have specific prerequisites you'll need to meet to authorize the connector to access its data. +For example, certain data sources may require you to create an OAuth application, or create a service account. +You'll need to check the <> for these details. + +[discrete] +[[ingest-data-through-integrations-connector-client-step-1-initial-setup-in-ui]] +== Step 1: Initial setup in UI + +In your project's UI, go to **{es} → Connectors**. +Follow these steps: + +. Select **Create a connector**. +. Choose a third-party service from the list of connector types. +. Add a name and optional description to identify the connector. +. Copy the `connector_id`, `service_type`, and `elasticsearch.host` values printed to the screen. +You'll need to update these values in your https://github.com/elastic/connectors/blob/main/config.yml[`config.yml`] file. +. Navigate to **Elasticsearch → Home**, and make a note of your **{es} endpoint** and **API key** values. You can create a new API key by clicking on **New** in the **API key** section. +. Run the connector code either from source or with Docker, following the instructions below. 
+ +[discrete] +[[ingest-data-through-integrations-connector-client-step-2-deploy-your-self-managed-connector]] +== Step 2: Deploy your self-managed connector + +To use connector clients, you must deploy the connector service so your connector can talk to your {es} instance. +The source code is hosted in the `elastic/connectors` repository. + +You have two deployment options: + +* Run with <> (recommended) +* Run from <> + +[NOTE] +==== +You'll need the following values handy to update your `config.yml` file: + +* `elasticsearch.host`: Your {es} endpoint. Printed to the screen when you create a new connector. +* `elasticsearch.api_key`: Your {es} API key. You can create API keys by navigating to **Home**, and clicking **New** in the **API key** section. Once your connector is running, you'll be able to create a new API key that is limited to only access the connector's index. +* `connector_id`: Unique id for your connector. Printed to the screen when you create a new connector. +* `service_type`: Original data source type. Printed to the screen when you create a new connector. +==== + +[discrete] +[[ingest-data-through-integrations-connector-client-run-with-docker]] +=== Run with Docker + +You can deploy connector clients using Docker. +Follow these instructions. + +**Step 1: Download sample configuration file** + +You can either download the configuration file manually or run the following command: + +[source,shell] +---- +curl https://raw.githubusercontent.com/elastic/connectors/main/config.yml.example --output /connectors-config/config.yml +---- + +Change the `--output` argument value to the path where you want to save the configuration file. 
+
+**Step 2: Update the configuration file for your self-managed connector**
+
+Update the following settings to match your environment:
+
+* `elasticsearch.host`
+* `elasticsearch.api_key`
+* `connector_id`
+* `service_type`
+
+Your configuration file should look like this:
+
+[source,yaml]
+----
+elasticsearch.host:
+elasticsearch.api_key:
+
+connectors:
+  -
+    connector_id:
+    service_type: # sharepoint_online (example)
+    api_key: # Optional. If not provided, the connector will use the elasticsearch.api_key instead
+----
+
+**Step 3: Run the Docker image**
+
+Use the following command, substituting values where necessary:
+
+[source,shell]
+----
+docker run \
+-v "/connectors-config:/config" \ # NOTE: change absolute path to match where config.yml is located on your machine
+--tty \
+--rm \
+docker.elastic.co/enterprise-search/elastic-connectors:{version}.0 \
+/app/bin/elastic-ingest \
+-c /config/config.yml # Path to your configuration file in the container
+----
+
+Find all available Docker images in the https://www.docker.elastic.co/r/enterprise-search/elastic-connectors[official Elastic Docker registry].
+
+[TIP]
+====
+Each individual connector client reference contains instructions for deploying specific connectors using Docker.
+====
+
+[discrete]
+[[ingest-data-through-integrations-connector-client-run-from-source]]
+=== Run from source
+
+Running from source requires cloning the repository and running the code locally.
+Use this approach if you're actively customizing connectors.
+
+Follow these steps:
+
+* Clone or fork the repository locally with the following command:
++
+[source,shell]
+----
+git clone https://github.com/elastic/connectors
+----
+* Open the `config.yml.example` file in the `connectors` repository and rename it to `config.yml`. 
+* Update the following settings to match your environment:
+** `elasticsearch.host`
+** `elasticsearch.api_key`
+** `connector_id`
+** `service_type`
+
+Your configuration file should look like this:
+
+[source,yaml]
+----
+elasticsearch.host:
+elasticsearch.api_key:
+
+connectors:
+  -
+    connector_id:
+    service_type: # sharepoint_online (example)
+    api_key: # Optional. If not provided, the connector will use the elasticsearch.api_key instead
+----
+
+[TIP]
+====
+Learn more about the `config.yml` file in the https://github.com/elastic/connectors/blob/main/docs/CONFIG.md[repo docs].
+====
+
+**Run the connector service**
+
+Once you've configured the connector code, you can run the connector service.
+
+In your terminal or IDE:
+
+* `cd` into the root of your `elastic/connectors` clone/fork.
+* Run the following commands to compile and run the connector service:
++
+[source,shell]
+----
+make install
+make run
+----
+
+The connector service should now be running in your terminal. If the connection to your {es} instance was successful, the **Configure your connector** step will be activated in the project's UI.
+
+Here we're working locally. In a production setup, you'll deploy the connector service to your own infrastructure.
+
+[discrete]
+[[ingest-data-through-integrations-connector-client-step-3-enter-data-source-details-in-ui]]
+== Step 3: Enter data source details in UI
+
+Once the connector service is running, it's time to head back to the UI to finalize the connector configuration.
+You should now see the **Configure your connector** step in your project's UI.
+
+In this step, you need to add the specific connection details about your data source instance, like URL, authorization credentials, etc.
+These **details will vary** based on the third-party data source you’re connecting to. 
+ +For example, the Sharepoint Online connector requires the following details about your Sharepoint instance: + +* **Tenant ID** +* **Tenant name** +* **Client ID** +* **Secret value** +* **Comma-separated list of tables** + +[discrete] +[[ingest-data-through-integrations-connector-client-step-4-connect-to-an-index]] +== Step 4: Connect to an index + +Once you've entered the data source details, you need to connect to an index. +This is the final step in your project's UI, before you can run a sync. + +You can choose to sync to an existing {es} index, or create a new index for your connector. +You can also create an API key that is limited to only access your selected index. + +.Index name prefix +[IMPORTANT] +==== +Due to a bug, you must prefix your index name with `search-`, otherwise you will hit an error. +For example, `search-my-index` is a valid index name, but `my-index` is not. +==== + +[NOTE] +==== +When choosing an existing index for the connector to sync to, please ensure mappings are defined and are appropriate for incoming data. Connectors will not successfully sync to existing indices without mappings. If you are unsure about managing index mappings, choose to have your connector create the new index. +==== + +Once this step is completed, you're ready to run a sync. +When a sync is launched you'll start to see documents being added to your {es} index. + +Learn https://github.com/elastic/connectors/blob/main/docs/DEVELOPING.md#syncing[how syncing works] in the `elastic/connectors` repo docs. + +[discrete] +[[ingest-data-through-integrations-connector-client-learn-more]] +== Learn more + +* Read the main https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors.html[Elastic connectors documentation] +* The https://github.com/elastic/connectors/tree/main#connector-framework[Elastic connector framework] enables you to: ++ +** Customize existing connector clients. +** Build your own connector clients. 
diff --git a/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.mdx b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx similarity index 100% rename from serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.mdx rename to serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc new file mode 100644 index 0000000000..0a5ca16e3a --- /dev/null +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc @@ -0,0 +1,102 @@ +[[ingest-data-through-logstash]] += Logstash + +:description: Use {ls} to ship data to {es}. +:keywords: serverless, elasticsearch, ingest, logstash, how to + +preview:[] + +{ls} is an open source data collection engine with real-time pipelining capabilities. +It supports a wide variety of data sources, and can dynamically unify data from disparate sources and normalize the data into destinations of your choice. + +{ls} can collect data using a variety of {ls} {logstash-ref}/input-plugins.html[input plugins], enrich and transform the data with {ls} {logstash-ref}/filter-plugins.html[filter plugins], +and output the data to {es} using the {ls} {logstash-ref}/plugins-outputs-elasticsearch.html[Elasticsearch output plugin]. + +You can use {ls} to extend <> for advanced use cases, +such as data routed to multiple destinations or when you need to make your data persistent. + +.Logstash for Elasticsearch on serverless +[NOTE] +==== +{ls} is a powerful, versatile ETL (Extract, Transform, Load) engine that can play an important role in organizations of all sizes. 
+Some capabilities and features for large, self-managed users aren't appropriate for {serverless-short}.
+
+You'll use the {ls} {logstash-ref}/plugins-outputs-elasticsearch.html[{es} output plugin] to send data to {es3}.
+Some differences to note between {es3} and self-managed {es}:
+
+* Your logstash-output-elasticsearch configuration uses **API keys** to access {es} from {ls}.
+User-based security settings are ignored and may cause errors.
+* {es3} uses **{dlm} ({dlm-init})** instead of {ilm} ({ilm-init}).
+If you add {ilm-init} settings to your {es} output configuration, they are ignored and may cause errors.
+* **{ls} monitoring** for {serverless-short} is available through the https://github.com/elastic/integrations/blob/main/packages/logstash/_dev/build/docs/README.md[{ls} Integration] in https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[Elastic Observability].
+
+**Known issue**
+
+* The logstash-output-elasticsearch `hosts` setting defaults to port `:9200`.
+Set the value to port `:443` instead.
+====
+
+[discrete]
+[[ingest-data-through-logstash-requirements]]
+== Requirements
+
+To use {ls} to send data to {es3}, you must be using:
+
+* {ls} 8.10.1 or later
+* {ls} {logstash-ref}/plugins-outputs-elasticsearch.html[{es} output plugin] 11.18.0 or later
+* {ls} {logstash-ref}/plugins-inputs-elasticsearch.html[{es} input plugin] 4.18.0 or later
+* {ls} {logstash-ref}/plugins-filters-elasticsearch.html[{es} filter plugin] 3.16.0 or later
+
+[discrete]
+[[ingest-data-through-logstash-secure-connection]]
+== Secure connection
+
+Serverless Elasticsearch simplifies secure communication between {ls} and {es}.
+Configure the {logstash-ref}/plugins-outputs-elasticsearch.html[Elasticsearch output] plugin to use
+{logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-cloud_id[`cloud_id`] and
+{logstash-ref}/plugins-outputs-elasticsearch.html#plugins-outputs-elasticsearch-api_key[`api_key`]. 
+No additional SSL configuration steps are needed. + +[discrete] +[[ingest-data-through-logstash-api-keys-for-connecting-ls-to-es3]] +== API keys for connecting {ls} to {es3} + +Use the **Security: API key** section in the UI to https://www.elastic.co/docs/current/serverless/api-keys[create an API key] +for securely connecting the {ls} {es} output to {es3}. +We recommend creating a unique API key per {ls} instance. +You can create as many API keys as necessary. + +When you set up your API keys, use the metadata option to tag each API key with details that are meaningful to you. +This step makes managing multiple API keys easier. + +After you generate an API key, add it to your {ls} {logstash-ref}/plugins-outputs-elasticsearch.html[{es} output plugin] config file's `api_key` setting. +Here's an example: + +[source,bash] +---- +output { + elasticsearch { + api_key => "TiNAGG4BaaMdaH1tRfuU:KnR6yE41RrSowb0kQ0HWoA" + } +} +---- + +[discrete] +[[ingest-data-through-logstash-migrating-elasticsearch-data-using-ls]] +== Migrating Elasticsearch data using {ls} + +You can use {ls} to migrate data from self-managed {es} or {ess} to {es3}, or to migrate data from one {es3} deployment to another. + +Create a {logstash-ref}/configuration.html[{ls} pipeline] that includes the {es} {logstash-ref}/plugins-inputs-elasticsearch.html[input plugin] and {logstash-ref}/plugins-outputs-elasticsearch.html[output plugin]. + +Configure the {es} input to point to your source deployment or instance, and configure the {es} output with the `cloud_id` and `api_key` settings for your target {es3} instance. + +If your origin index is using <>, then you might need to adjust your index settings. + +[discrete] +[[ingest-data-through-logstash-next-steps]] +== Next steps + +Check out the https://www.elastic.co/logstash[Logstash product page] to see what {ls} can do for you. +When you're ready, +dive into the {logstash-ref}/index.html[Logstash documentation]. 
diff --git a/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.mdx b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.mdx similarity index 100% rename from serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.mdx rename to serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc new file mode 100644 index 0000000000..54487942a1 --- /dev/null +++ b/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc @@ -0,0 +1,47 @@ +[[ingest-data-file-upload]] += Upload a file + +:description: Add data to {es} using the File Uploader. +:keywords: serverless, elasticsearch, ingest, how to + +preview:[] + +You can upload files to {es} using the File Uploader. +Use the visualizer to inspect the data before importing it. + +You can upload different file formats for analysis: + +File formats supported up to 500 MB: + +* CSV +* TSV +* NDJSON +* Log files + +File formats supported up to 60 MB: + +* PDF +* Microsoft Office files (Word, Excel, PowerPoint) +* Plain Text (TXT) +* Rich Text (RTF) +* Open Document Format (ODF) + +[discrete] +[[ingest-data-file-upload-how-to-upload-a-file]] +== How to upload a file + +You'll find a link to the File Uploader on the {es} **Home** page. + +[role="screenshot"] +image::images/file-uploader-homepage-link.png[File upload link] + +Clicking **Upload a file** opens the File Uploader UI. + +[role="screenshot"] +image::images/file-uploader-UI.png[File upload UI] + +[IMPORTANT] +==== +The upload feature is not intended for use as part of a repeated production +process, but rather for the initial exploration of your data. 
+====
diff --git a/serverless/pages/ingest-your-data-upload-file.mdx b/serverless/pages/elasticsearch/ingest-your-data-upload-file.mdx
similarity index 100%
rename from serverless/pages/ingest-your-data-upload-file.mdx
rename to serverless/pages/elasticsearch/ingest-your-data-upload-file.mdx
diff --git a/serverless/pages/elasticsearch/ingest-your-data.asciidoc b/serverless/pages/elasticsearch/ingest-your-data.asciidoc
new file mode 100644
index 0000000000..91fd190989
--- /dev/null
+++ b/serverless/pages/elasticsearch/ingest-your-data.asciidoc
@@ -0,0 +1,29 @@
+[[ingest-your-data]]
+= Ingest your data
+
+:description: Add data to your {es} project.
+:keywords: serverless, elasticsearch, ingest, overview
+
+preview:[]
+
+You have many options for ingesting, or indexing, data into {es}:
+
+* <>
+* <>
+* <>
+* <>
+* <>
+
+The best ingest option(s) for your use case depends on whether you are indexing general content or time series (timestamped) data.
+
+**General content**
+
+General content includes HTML pages, catalogs, files, and other content that does not update continuously.
+This data can be updated, but the value of the content remains relatively constant over time.
+Use connector clients to sync data from a range of popular data sources to {es}.
+You can also send data directly to {es} from your application using the API.
+
+**Time series (timestamped) data**
+
+Time series, or timestamped data, describes data that changes frequently and "flows" over time, such as stock quotes, system metrics, and network traffic data.
+Use {beats} or {ls} to collect time series data. 
diff --git a/serverless/pages/ingest-your-data.mdx b/serverless/pages/elasticsearch/ingest-your-data.mdx similarity index 100% rename from serverless/pages/ingest-your-data.mdx rename to serverless/pages/elasticsearch/ingest-your-data.mdx diff --git a/serverless/pages/elasticsearch/knn-search.asciidoc b/serverless/pages/elasticsearch/knn-search.asciidoc new file mode 100644 index 0000000000..97ed5156b6 --- /dev/null +++ b/serverless/pages/elasticsearch/knn-search.asciidoc @@ -0,0 +1,1095 @@ +[[knn-search]] += k-nearest neighbor (kNN) search + +:description: Vector search with k-nearest neighbor (kNN). +:keywords: serverless, elasticsearch, search, vector, knn, ann + +preview:[] + +A _k-nearest neighbor_ (kNN) search finds the _k_ nearest vectors to a query +vector, as measured by a similarity metric. + +Common use cases for kNN include: + +* Relevance ranking based on natural language processing (NLP) algorithms +* Product recommendations and recommendation engines +* Similarity search for images or videos + +[discrete] +[[knn-search-prerequisites]] +== Prerequisites + +* To run a kNN search, you must be able to convert your data into meaningful +vector values. You can +{ml-docs}/ml-nlp-text-emb-vector-search-example.html[create these vectors using +a natural language processing (NLP) model in {es}], or generate them outside +{es}. Vectors can be added to documents as {ref}/dense-vector.html[`dense_vector`] field +values. Queries are represented as vectors with the same dimension. ++ +Design your vectors so that the closer a document's vector is to a query vector, +based on a similarity metric, the better its match. 
+* To complete the steps in this guide, you must have the following +{ref}/security-privileges.html#privileges-list-indices[index privileges]: ++ +** `create_index` or `manage` to create an index with a `dense_vector` field +** `create`, `index`, or `write` to add data to the index you created +** `read` to search the index + +[discrete] +[[knn-search-knn-methods]] +== kNN methods + +{es} supports two methods for kNN search: + +* Approximate kNN using the `knn` search +option +* Exact, brute-force kNN using a `script_score` query with a +vector function + +In most cases, you'll want to use approximate kNN. Approximate kNN offers lower +latency at the cost of slower indexing and imperfect accuracy. + +Exact, brute-force kNN guarantees accurate results but doesn't scale well with +large datasets. With this approach, a `script_score` query must scan each +matching document to compute the vector function, which can result in slow +search speeds. However, you can improve latency by using a {ref}/query-dsl.html[query] +to limit the number of matching documents passed to the function. If you +filter your data to a small subset of documents, you can get good search +performance using this approach. + +[discrete] +[[knn-search-approximate-knn]] +== Approximate kNN + +To run an approximate kNN search, use the {ref}/knn-search.html#approximate-knn[`knn` option] +to search one or more `dense_vector` fields with indexing enabled. + +. Explicitly map one or more `dense_vector` fields. Approximate kNN search +requires the following mapping options: ++ +** A `similarity` value. This value determines the similarity metric used to +score documents based on similarity between the query and document vector. For a +list of available metrics, see the {ref}/dense-vector.html#dense-vector-similarity[`similarity`] +parameter documentation. The `similarity` setting defaults to `cosine`. 
++ +[source,bash] +---- +curl -X PUT "${ES_URL}/image-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "mappings": { + "properties": { + "image-vector": { + "type": "dense_vector", + "dims": 3, + "similarity": "l2_norm" + }, + "title-vector": { + "type": "dense_vector", + "dims": 5, + "similarity": "l2_norm" + }, + "title": { + "type": "text" + }, + "file-type": { + "type": "keyword" + } + } + } +} +' +---- +. Index your data. ++ +[source,bash] +---- +curl -X POST "${ES_URL}/image-index/_bulk?refresh=true" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ "index": { "_id": "1" } } +{ "image-vector": [1, 5, -20], "title-vector": [12, 50, -10, 0, 1], "title": "moose family", "file-type": "jpg" } +{ "index": { "_id": "2" } } +{ "image-vector": [42, 8, -15], "title-vector": [25, 1, 4, -12, 2], "title": "alpine lake", "file-type": "png" } +{ "index": { "_id": "3" } } +{ "image-vector": [15, 11, 23], "title-vector": [1, 5, 25, 50, 20], "title": "full moon", "file-type": "jpg" } +... +' +---- ++ +// TEST[continued] ++ +// TEST[s/\.\.\.//] +. Run the search using the {ref}/knn-search.html#approximate-knn[`knn` option]. ++ +[source,bash] +---- +curl -X POST "${ES_URL}/image-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "knn": { + "field": "image-vector", + "query_vector": [ + -5, + 9, + -12 + ], + "k": 10, + "num_candidates": 100 + }, + "fields": [ + "title", + "file-type" + ] +} +' +---- ++ +// TEST[continued] ++ +// TEST[s/"k": 10/"k": 3/] ++ +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + +The {ref}/search-search.html#search-api-response-body-score[document `_score`] is determined by +the similarity between the query and document vector. See +{ref}/dense-vector.html#dense-vector-similarity[`similarity`] for more information on how kNN +search scores are computed. 
+ +[discrete] +[[knn-search-tune-approximate-knn-for-speed-or-accuracy]] +=== Tune approximate kNN for speed or accuracy + +To gather results, the kNN search API finds a `num_candidates` number of +approximate nearest neighbor candidates on each shard. The search computes the +similarity of these candidate vectors to the query vector, selecting the `k` +most similar results from each shard. The search then merges the results from +each shard to return the global top `k` nearest neighbors. + +You can increase `num_candidates` for more accurate results at the cost of +slower search speeds. A search with a high value for `num_candidates` +considers more candidates from each shard. This takes more time, but the +search has a higher probability of finding the true `k` top nearest neighbors. + +Similarly, you can decrease `num_candidates` for faster searches with +potentially less accurate results. + +[discrete] +[[approximate-knn-using-byte-vectors]] +=== Approximate kNN using byte vectors + +The approximate kNN search API supports `byte` value vectors in +addition to `float` value vectors. Use the {ref}/knn-search.html#approximate-knn[`knn` option] +to search a `dense_vector` field with {ref}/dense-vector.html#dense-vector-params[`element_type`] set to +`byte` and indexing enabled. + +. Explicitly map one or more `dense_vector` fields with +{ref}/dense-vector.html#dense-vector-params[`element_type`] set to `byte` and indexing enabled. ++ +[source,bash] +---- +curl -X PUT "${ES_URL}/byte-image-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "mappings": { + "properties": { + "byte-image-vector": { + "type": "dense_vector", + "element_type": "byte", + "dims": 2 + }, + "title": { + "type": "text" + } + } + } +} +' +---- ++ +// TEST[continued] +. Index your data ensuring all vector values +are integers within the range [-128, 127]. 
++ +[source,bash] +---- +curl -X POST "${ES_URL}/byte-image-index/_bulk?refresh=true" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ "index": { "_id": "1" } } +{ "byte-image-vector": [5, -20], "title": "moose family" } +{ "index": { "_id": "2" } } +{ "byte-image-vector": [8, -15], "title": "alpine lake" } +{ "index": { "_id": "3" } } +{ "byte-image-vector": [11, 23], "title": "full moon" } +' +---- ++ +// TEST[continued] +. Run the search using the {ref}/knn-search.html#approximate-knn[`knn` option] +ensuring the `query_vector` values are integers within the +range [-128, 127]. ++ +[source,bash] +---- +curl -X POST "${ES_URL}/byte-image-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "knn": { + "field": "byte-image-vector", + "query_vector": [ + -5, + 9 + ], + "k": 10, + "num_candidates": 100 + }, + "fields": [ + "title" + ] +} +' +---- ++ +// TEST[continued] ++ +// TEST[s/"k": 10/"k": 3/] ++ +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + +[discrete] +[[knn-search-filtered-knn-search]] +=== Filtered kNN search + +The kNN search API supports restricting the search using a filter. The search +will return the top `k` documents that also match the filter query. + +The following request performs an approximate kNN search filtered by the +`file-type` field: + +[source,bash] +---- +curl -X POST "${ES_URL}/image-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "knn": { + "field": "image-vector", + "query_vector": [54, 10, -2], + "k": 5, + "num_candidates": 50, + "filter": { + "term": { + "file-type": "png" + } + } + }, + "fields": ["title"], + "_source": false +} +' +---- + +// TEST[continued] + +[NOTE] +==== +The filter is applied **during** the approximate kNN search to ensure +that `k` matching documents are returned. 
This contrasts with a +post-filtering approach, where the filter is applied **after** the approximate +kNN search completes. Post-filtering has the downside that it sometimes +returns fewer than k results, even when there are enough matching documents. +==== + +[discrete] +[[knn-search-combine-approximate-knn-with-other-features]] +=== Combine approximate kNN with other features + +You can perform 'hybrid retrieval' by providing both the +{ref}/knn-search.html#approximate-knn[`knn` option] and a {ref}/search-search.html#request-body-search-query[`query`]: + +[source,bash] +---- +curl -X POST "${ES_URL}/image-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "query": { + "match": { + "title": { + "query": "mountain lake", + "boost": 0.9 + } + } + }, + "knn": { + "field": "image-vector", + "query_vector": [54, 10, -2], + "k": 5, + "num_candidates": 50, + "boost": 0.1 + }, + "size": 10 +} +' +---- + +// TEST[continued] + +This search finds the global top `k = 5` vector matches, combines them with the matches from the `match` query, and +finally returns the 10 top-scoring results. The `knn` and `query` matches are combined through a disjunction, as if you +took a boolean 'or' between them. The top `k` vector results represent the global nearest neighbors across all index +shards. + +The score of each hit is the sum of the `knn` and `query` scores. You can specify a `boost` value to give a weight to +each score in the sum. In the example above, the scores will be calculated as + +[source] +---- +score = 0.9 * match_score + 0.1 * knn_score +---- + +The `knn` option can also be used with <>. +In general, {es} computes aggregations over all documents that match the search. +So for approximate kNN search, aggregations are calculated on the top `k` +nearest documents. If the search also includes a `query`, then aggregations are +calculated on the combined set of `knn` and `query` matches. 
+ +[discrete] +[[knn-search-perform-semantic-search]] +=== Perform semantic search + +kNN search enables you to perform semantic search by using a previously deployed +{ml-docs}/ml-nlp-search-compare.html#ml-nlp-text-embedding[text embedding model]. +Instead of literal matching on search terms, semantic search retrieves results +based on the intent and the contextual meaning of a search query. + +Under the hood, the text embedding NLP model generates a dense vector from the +input query string called `model_text` you provide. Then, it is searched +against an index containing dense vectors created with the same text embedding +{ml} model. The search results are semantically similar as learned by the model. + +[IMPORTANT] +==== +To perform semantic search: + +* you need an index that contains the dense vector representation of the input +data to search against, +* you must use the same text embedding model for search that you used to create +the dense vectors from the input data, +* the text embedding NLP model deployment must be started. +==== + +Reference the deployed text embedding model or the model deployment in the +`query_vector_builder` object and provide the search query as `model_text`: + +// NOTCONSOLE + +[source,js] +---- +(...) +{ + "knn": { + "field": "dense-vector-field", + "k": 10, + "num_candidates": 100, + "query_vector_builder": { + "text_embedding": { <1> + "model_id": "my-text-embedding-model", <2> + "model_text": "The opposite of blue" <3> + } + } + } +} +(...) +---- + +<1> The {nlp} task to perform. It must be `text_embedding`. + +<2> The ID of the text embedding model to use to generate the dense vectors from +the query string. Use the same model that generated the embeddings from the +input text in the index you search against. You can use the value of the +`deployment_id` instead in the `model_id` argument. + +<3> The query string from which the model generates the dense vector +representation. 
+ +For more information on how to deploy a trained model and use it to create text +embeddings, refer to this +{ml-docs}/ml-nlp-text-emb-vector-search-example.html[end-to-end example]. + +[discrete] +[[knn-search-search-multiple-knn-fields]] +=== Search multiple kNN fields + +In addition to 'hybrid retrieval', you can search more than one kNN vector field at a time: + +[source,bash] +---- +curl -X POST "${ES_URL}/image-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "query": { + "match": { + "title": { + "query": "mountain lake", + "boost": 0.9 + } + } + }, + "knn": [ { + "field": "image-vector", + "query_vector": [54, 10, -2], + "k": 5, + "num_candidates": 50, + "boost": 0.1 + }, + { + "field": "title-vector", + "query_vector": [1, 20, -52, 23, 10], + "k": 10, + "num_candidates": 10, + "boost": 0.5 + }], + "size": 10 +} +' +---- + +// TEST[continued] + +This search finds the global top `k = 5` vector matches for `image-vector` and the global `k = 10` for the `title-vector`. +These top values are then combined with the matches from the `match` query and the top-10 documents are returned. +The multiple `knn` entries and the `query` matches are combined through a disjunction, +as if you took a boolean 'or' between them. The top `k` vector results represent the global nearest neighbors across +all index shards. + +The scoring for a doc with the above configured boosts would be: + +[source] +---- +score = 0.9 * match_score + 0.1 * knn_score_image-vector + 0.5 * knn_score_title-vector +---- + +[discrete] +[[knn-search-search-knn-with-expected-similarity]] +=== Search kNN with expected similarity + +While kNN is a powerful tool, it always tries to return `k` nearest neighbors. Consequently, when using `knn` with +a `filter`, you could filter out all relevant documents and only have irrelevant ones left to search. 
In that situation, +`knn` will still do its best to return `k` nearest neighbors, even though those neighbors could be far away in the +vector space. + +To alleviate this worry, there is a `similarity` parameter available in the `knn` clause. This value is the required +minimum similarity for a vector to be considered a match. The `knn` search flow with this parameter is as follows: + +* Apply any user provided `filter` queries +* Explore the vector space to get `k` vectors +* Do not return any vectors that are further away than the configured `similarity` + +Here is an example. In this example we search for the given `query_vector` for `k` nearest neighbors. However, with +`filter` applied and requiring that the found vectors have at least the provided `similarity` between them. + +[source,bash] +---- +curl -X POST "${ES_URL}/image-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "knn": { + "field": "image-vector", + "query_vector": [1, 5, -20], + "k": 5, + "num_candidates": 50, + "similarity": 36, + "filter": { + "term": { + "file-type": "png" + } + } + }, + "fields": ["title"], + "_source": false +} +' +---- + +// TEST[continued] + +In our data set, the only document with the file type of `png` has a vector of `[42, 8, -15]`. The `l2_norm` distance +between `[42, 8, -15]` and `[1, 5, -20]` is `41.412`, which is greater than the configured similarity of `36`. Meaning, +this search will return no hits. + +[discrete] +[[nested-knn-search]] +=== Nested kNN Search + +It is common for text to exceed a particular model's token limit and requires chunking before building the embeddings +for individual chunks. When using {ref}/nested.html[`nested`] with {ref}/dense-vector.html[`dense_vector`], you can achieve nearest +passage retrieval without copying top-level document metadata. + +Here is a simple passage vectors index that stores vectors and some top-level metadata for filtering. 
+ +[source,bash] +---- +curl -X PUT "${ES_URL}/passage_vectors" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "mappings": { + "properties": { + "full_text": { + "type": "text" + }, + "creation_time": { + "type": "date" + }, + "paragraph": { + "type": "nested", + "properties": { + "vector": { + "type": "dense_vector", + "dims": 2 + }, + "text": { + "type": "text", + "index": false + } + } + } + } + } +} +' +---- + +// TEST[continued] + +With the above mapping, we can index multiple passage vectors along with storing the individual passage text. + +[source,bash] +---- +curl -X POST "${ES_URL}/passage_vectors/_bulk?refresh=true" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ "index": { "_id": "1" } } +{ "full_text": "first paragraph another paragraph", "creation_time": "2019-05-04", "paragraph": [ { "vector": [ 0.45, 45 ], "text": "first paragraph", "paragraph_id": "1" }, { "vector": [ 0.8, 0.6 ], "text": "another paragraph", "paragraph_id": "2" } ] } +{ "index": { "_id": "2" } } +{ "full_text": "number one paragraph number two paragraph", "creation_time": "2020-05-04", "paragraph": [ { "vector": [ 1.2, 4.5 ], "text": "number one paragraph", "paragraph_id": "1" }, { "vector": [ -1, 42 ], "text": "number two paragraph", "paragraph_id": "2" } ] } +' +---- + +// TEST[continued] + +// TEST[s/\.\.\.//] + +The query will seem very similar to a typical kNN search: + +[source,bash] +---- +curl -X POST "${ES_URL}/passage_vectors/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "fields": ["full_text", "creation_time"], + "_source": false, + "knn": { + "query_vector": [ + 0.45, + 45 + ], + "field": "paragraph.vector", + "k": 2, + "num_candidates": 2 + } +} +' +---- + +// TEST[continued] + +Note below that even though we have 4 total vectors, we still return two documents. 
kNN search over nested dense_vectors +will always diversify the top results over the top-level document. Meaning, `"k"` top-level documents will be returned, +scored by their nearest passage vector (e.g. `"paragraph.vector"`). + +[source,console-result] +---- +{ + "took": 4, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 2, + "relation": "eq" + }, + "max_score": 1.0, + "hits": [ + { + "_index": "passage_vectors", + "_id": "1", + "_score": 1.0, + "fields": { + "creation_time": [ + "2019-05-04T00:00:00.000Z" + ], + "full_text": [ + "first paragraph another paragraph" + ] + } + }, + { + "_index": "passage_vectors", + "_id": "2", + "_score": 0.9997144, + "fields": { + "creation_time": [ + "2020-05-04T00:00:00.000Z" + ], + "full_text": [ + "number one paragraph number two paragraph" + ] + } + } + ] + } +} +---- + +// TESTRESPONSE[s/"took": 4/"took" : "$body.took"/] + +What if you wanted to filter by some top-level document metadata? You can do this by adding `filter` to your +`knn` clause. + +[NOTE] +==== +`filter` will always be over the top-level document metadata. This means you cannot filter based on `nested` +field metadata. +==== + +[source,bash] +---- +curl -X POST "${ES_URL}/passage_vectors/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "fields": [ + "creation_time", + "full_text" + ], + "_source": false, + "knn": { + "query_vector": [ + 0.45, + 45 + ], + "field": "paragraph.vector", + "k": 2, + "num_candidates": 2, + "filter": { + "bool": { + "filter": [ + { + "range": { + "creation_time": { + "gte": "2019-05-01", + "lte": "2019-05-05" + } + } + } + ] + } + } + } +} +' +---- + +// TEST[continued] + +Now we have filtered based on the top level `"creation_time"` and only one document falls within that range. 
+ +[source,console-result] +---- +{ + "took": 4, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 1.0, + "hits": [ + { + "_index": "passage_vectors", + "_id": "1", + "_score": 1.0, + "fields": { + "creation_time": [ + "2019-05-04T00:00:00.000Z" + ], + "full_text": [ + "first paragraph another paragraph" + ] + } + } + ] + } +} +---- + +// TESTRESPONSE[s/"took": 4/"took" : "$body.took"/] + +Additionally, if you wanted to extract the nearest passage for a matched document, you can supply {ref}/inner-hits.html[inner_hits] +to the `knn` clause. + +[NOTE] +==== +`inner_hits` for kNN will only ever return a single hit, the nearest passage vector. +Setting `"size"` to any value greater than `1` will have no effect on the results. +==== + +[source,bash] +---- +curl -X POST "${ES_URL}/passage_vectors/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "fields": [ + "creation_time", + "full_text" + ], + "_source": false, + "knn": { + "query_vector": [ + 0.45, + 45 + ], + "field": "paragraph.vector", + "k": 2, + "num_candidates": 2, + "inner_hits": { + "_source": false, + "fields": [ + "paragraph.text" + ] + } + } +} +' +---- + +// TEST[continued] + +Now the result will contain the nearest found paragraph when searching. 
+ +[source,console-result] +---- +{ + "took": 4, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 2, + "relation": "eq" + }, + "max_score": 1.0, + "hits": [ + { + "_index": "passage_vectors", + "_id": "1", + "_score": 1.0, + "fields": { + "creation_time": [ + "2019-05-04T00:00:00.000Z" + ], + "full_text": [ + "first paragraph another paragraph" + ] + }, + "inner_hits": { + "paragraph": { + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 1.0, + "hits": [ + { + "_index": "passage_vectors", + "_id": "1", + "_nested": { + "field": "paragraph", + "offset": 0 + }, + "_score": 1.0, + "fields": { + "paragraph": [ + { + "text": [ + "first paragraph" + ] + } + ] + } + } + ] + } + } + } + }, + { + "_index": "passage_vectors", + "_id": "2", + "_score": 0.9997144, + "fields": { + "creation_time": [ + "2020-05-04T00:00:00.000Z" + ], + "full_text": [ + "number one paragraph number two paragraph" + ] + }, + "inner_hits": { + "paragraph": { + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.9997144, + "hits": [ + { + "_index": "passage_vectors", + "_id": "2", + "_nested": { + "field": "paragraph", + "offset": 1 + }, + "_score": 0.9997144, + "fields": { + "paragraph": [ + { + "text": [ + "number two paragraph" + ] + } + ] + } + } + ] + } + } + } + } + ] + } +} +---- + +// TESTRESPONSE[s/"took": 4/"took" : "$body.took"/] + +[discrete] +[[knn-indexing-considerations]] +=== Indexing considerations + +For approximate kNN search, {es} stores the dense vector values of each +segment as an https://arxiv.org/abs/1603.09320[HNSW graph]. Indexing vectors for +approximate kNN search can take substantial time because of how expensive it is +to build these graphs. You may need to increase the client request timeout for +index and bulk requests. 
The {ref}/tune-knn-search.html[approximate kNN tuning guide] +contains important guidance around indexing performance, and how the index +configuration can affect search performance. + +In addition to its search-time tuning parameters, the HNSW algorithm has +index-time parameters that trade off between the cost of building the graph, +search speed, and accuracy. When setting up the `dense_vector` mapping, you +can use the {ref}/dense-vector.html#dense-vector-index-options[`index_options`] argument to adjust +these parameters: + +[source,bash] +---- +curl -X PUT "${ES_URL}/image-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "mappings": { + "properties": { + "image-vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "similarity": "l2_norm", + "index_options": { + "type": "hnsw", + "m": 32, + "ef_construction": 100 + } + } + } + } +} +' +---- + +[discrete] +[[knn-search-limitations-for-approximate-knn-search]] +=== Limitations for approximate kNN search + +{es} uses the https://arxiv.org/abs/1603.09320[HNSW algorithm] to support +efficient kNN search. Like most kNN algorithms, HNSW is an approximate method +that sacrifices result accuracy for improved search speed. This means the +results returned are not always the true _k_ closest neighbors. + +[NOTE] +==== +Approximate kNN search always uses the +{ref}/search-search.html#dfs-query-then-fetch[`dfs_query_then_fetch`] search type in order to gather +the global top `k` matches across shards. You cannot set the +`search_type` explicitly when running kNN search. +==== + +[discrete] +[[exact-knn]] +== Exact kNN + +To run an exact kNN search, use a `script_score` query with a vector function. + +. Explicitly map one or more `dense_vector` fields. If you don't intend to use +the field for approximate kNN, set the `index` mapping option to `false`. +This can significantly improve indexing speed. 
++ +[source,bash] +---- +curl -X PUT "${ES_URL}/product-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "mappings": { + "properties": { + "product-vector": { + "type": "dense_vector", + "dims": 5, + "index": false + }, + "price": { + "type": "long" + } + } + } +} +' +---- +. Index your data. ++ +[source,bash] +---- +curl -X POST "${ES_URL}/product-index/_bulk?refresh=true" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ "index": { "_id": "1" } } +{ "product-vector": [230.0, 300.33, -34.8988, 15.555, -200.0], "price": 1599 } +{ "index": { "_id": "2" } } +{ "product-vector": [-0.5, 100.0, -13.0, 14.8, -156.0], "price": 799 } +{ "index": { "_id": "3" } } +{ "product-vector": [0.5, 111.3, -13.0, 14.8, -156.0], "price": 1099 } +... +' +---- ++ +// TEST[continued] ++ +// TEST[s/\.\.\.//] +. Use the search API to run a `script_score` query containing +a {ref}/query-dsl-script-score-query.html#vector-functions[vector function]. + +[TIP] +==== +To limit the number of matched documents passed to the vector function, we +recommend you specify a filter query in the `script_score.query` parameter. If +needed, you can use a {ref}/query-dsl-match-all-query.html[`match_all` query] in this +parameter to match all documents. However, matching all documents can +significantly increase search latency. 
+ +[source,bash] +---- +curl -X POST "${ES_URL}/product-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d ' +{ + "query": { + "script_score": { + "query": { + "bool": { + "filter": { + "range": { + "price": { + "gte": 1000 + } + } + } + } + }, + "script": { + "source": "cosineSimilarity(params.queryVector, 'product-vector') + 1.0", + "params": { + "queryVector": [ + -0.5, + 90, + -10, + 14.8, + -156 + ] + } + } + } + } +} +' +---- + +// TEST[continued] +==== diff --git a/serverless/pages/knn-search.mdx b/serverless/pages/elasticsearch/knn-search.mdx similarity index 100% rename from serverless/pages/knn-search.mdx rename to serverless/pages/elasticsearch/knn-search.mdx diff --git a/serverless/pages/elasticsearch/pricing.asciidoc b/serverless/pages/elasticsearch/pricing.asciidoc new file mode 100644 index 0000000000..45889c9ea8 --- /dev/null +++ b/serverless/pages/elasticsearch/pricing.asciidoc @@ -0,0 +1,56 @@ +[[elasticsearch-billing]] += Elasticsearch billing dimensions + +:description: Learn about how Elasticsearch usage affects pricing. +:keywords: serverless, elasticsearch, overview + +preview:[] + +Elasticsearch is priced based on the consumption of the underlying +infrastructure used to support your use case, with the performance +characteristics you need. We measure by Virtual Compute Units (VCUs), which is a +slice of RAM, CPU and local disk for caching. The number of VCUs required will +depend on the amount and the rate of data sent to Elasticsearch and retained, +and the number of searches and latency you require for searches. In addition, if +you required {ml} for inference or NLP tasks, those VCUs are also +metered and billed. 
+ +include::../../partials/minimum-vcus-detail.asciidoc[] + +[discrete] +[[elasticsearch-billing-information-about-the-vcu-types-search-ingest-and-ml]] +== Information about the VCU types (Search, Ingest, and ML) + +There are three VCU types in Elasticsearch: + +* **Indexing** — The VCUs used to index the incoming documents to be +stored in Elasticsearch. +* **Search** — The VCUs used to return search results with the latency and +Queries per Second (QPS) you require. +* **Machine Learning** — The VCUs used to perform inference, NLP tasks, and other ML activities. + +[discrete] +[[elasticsearch-billing-information-about-the-search-ai-lake-dimension-gb]] +== Information about the Search AI Lake dimension (GB) + +For Elasticsearch, the Search AI Lake is where data is stored and retained. This is +charged in GBs for the size of data at rest. Depending on the enrichment, +vectorization and other activities during ingest, this size may be different +from the original size of the source data. + +[discrete] +[[elasticsearch-billing-managing-elasticsearch-costs]] +== Managing Elasticsearch costs + +You can control costs in a number of ways. Firstly there is the amount of +data that is retained. Elasticsearch will ensure that the most recent data is +cached, allowing for fast retrieval. Reducing the amount of data means fewer +Search VCUs may be required. If you need lower latency, then more Search VCUs +can be added by adjusting the Search Power. A further refinement is for data streams that can be used to store +time series data. For that type of data, you can further define the number of +days of data you want cacheable, which will affect the number of Search VCUs and +therefore the cost. Note that Elasticsearch Serverless maintains and bills for +https://www.elastic.co/pricing/serverless-search#what-are-the-minimum-compute-resource-vcus-on-elasticsearch-serverless[minimum compute resource Ingest and Search VCUs]. 
+ +For detailed Elasticsearch serverless project rates, check the +https://www.elastic.co/pricing/serverless-search[Elasticsearch Serverless pricing page]. diff --git a/serverless/pages/pricing.mdx b/serverless/pages/elasticsearch/pricing.mdx similarity index 100% rename from serverless/pages/pricing.mdx rename to serverless/pages/elasticsearch/pricing.mdx diff --git a/serverless/pages/elasticsearch/search-playground.asciidoc b/serverless/pages/elasticsearch/search-playground.asciidoc new file mode 100644 index 0000000000..f7ef5a69df --- /dev/null +++ b/serverless/pages/elasticsearch/search-playground.asciidoc @@ -0,0 +1,17 @@ +[[playground]] += Playground + +:description: Test and edit Elasticsearch queries and chat with your data using LLMs. +:keywords: serverless, elasticsearch, search, playground, GenAI, LLMs + +preview:[] + +Use the Search Playground to test and edit {es} queries visually in the UI. Then use the Chat Playground to combine your {es} data with large language models (LLMs) for retrieval augmented generation (RAG). +You can also view the underlying Python code that powers the chat interface, and use it in your own application. + +Find Playground in the {es} serverless UI under **{es} > Build > Playground**. + +[NOTE] +==== +ℹ️ The Playground documentation currently lives in the https://www.elastic.co/guide/en/kibana/master/playground.html[{kib} docs]. 
+==== diff --git a/serverless/pages/search-playground.mdx b/serverless/pages/elasticsearch/search-playground.mdx similarity index 100% rename from serverless/pages/search-playground.mdx rename to serverless/pages/elasticsearch/search-playground.mdx diff --git a/serverless/pages/elasticsearch/search-with-synonyms.asciidoc b/serverless/pages/elasticsearch/search-with-synonyms.asciidoc new file mode 100644 index 0000000000..7d5d022065 --- /dev/null +++ b/serverless/pages/elasticsearch/search-with-synonyms.asciidoc @@ -0,0 +1,122 @@ +[[elasticsearch-reference-search-with-synonyms]] += Full-text search with synonyms + +:description: Use synonyms to search for words or phrases that have the same or similar meaning. +:keywords: serverless, elasticsearch, search, synonyms + +preview:[] + +Synonyms are words or phrases that have the same or similar meaning. +They are an important aspect of search, as they can improve the search experience and increase the scope of search results. + +Synonyms allow you to: + +* **Improve search relevance** by finding relevant documents that use different terms to express the same concept. +* Make **domain-specific vocabulary** more user-friendly, allowing users to use search terms they are more familiar with. +* **Define common misspellings and typos** to transparently handle common mistakes. + +Synonyms are grouped together using **synonyms sets**. +You can have as many synonyms sets as you need. + +In order to use synonyms sets in {es}, you need to: + +* Store your synonyms set +* Configure synonyms token filters and analyzers + +[discrete] +[[elasticsearch-reference-search-with-synonyms-store-your-synonyms-set]] +== Store your synonyms set + +Your synonyms sets need to be stored in {es} so your analyzers can refer to them. 
+There are two ways to store your synonyms sets: + +[discrete] +[[elasticsearch-reference-search-with-synonyms-synonyms-api]] +=== Synonyms API + +You can use the {ref}/synonyms-apis.html[synonyms APIs] to manage synonyms sets. +This is the most flexible approach, as it allows to dynamically define and modify synonyms sets. + +Changes in your synonyms sets will automatically reload the associated analyzers. + +[discrete] +[[elasticsearch-reference-search-with-synonyms-inline]] +=== Inline + +You can test your synonyms by adding them directly inline in your token filter definition. + +[WARNING] +==== +Inline synonyms are not recommended for production usage. +A large number of inline synonyms increases cluster size unnecessarily and can lead to performance issues. +==== + +[discrete] +[[synonyms-synonym-token-filters]] +=== Configure synonyms token filters and analyzers + +Once your synonyms sets are created, you can start configuring your token filters and analyzers to use them. + +{es} uses synonyms as part of the {ref}/analysis-overview.html[analysis process]. +You can use two types of {ref}/analysis-tokenfilters.html[token filter]: + +* {ref}/analysis-synonym-graph-tokenfilter.html[Synonym graph token filter]: It is recommended to use it, as it can correctly handle multi-word synonyms ("hurriedly", "in a hurry"). +* {ref}/analysis-synonym-tokenfilter.html[Synonym token filter]: Not recommended if you need to use multi-word synonyms. + +Check each synonym token filter documentation for configuration details and instructions on adding it to an analyzer. + +[discrete] +[[elasticsearch-reference-search-with-synonyms-test-your-analyzer]] +=== Test your analyzer + +You can test an analyzer configuration without modifying your index settings. 
+Use the {ref}/indices-analyze.html[analyze API] to test your analyzer chain:
+
+[source,bash]
+----
+curl "${ES_URL}/my-index/_analyze?pretty" \
+-H "Authorization: ApiKey ${API_KEY}" \
+-H "Content-Type: application/json" \
+-d'
+{
+  "tokenizer": "standard",
+  "filter" : [
+    "lowercase",
+    {
+      "type": "synonym_graph",
+      "synonyms": ["pc => personal computer", "computer, pc, laptop"]
+    }
+  ],
+  "text" : "Check how PC synonyms work"
+}
+'
+----
+
+[discrete]
+[[elasticsearch-reference-search-with-synonyms-apply-synonyms-at-index-or-search-time]]
+=== Apply synonyms at index or search time
+
+Analyzers can be applied at {ref}/analysis-index-search-time.html[index time or search time].
+
+You need to decide when to apply your synonyms:
+
+* Index time: Synonyms are applied when the documents are indexed into {es}. This is a less flexible alternative, as changes to your synonyms require {ref}/docs-reindex.html[reindexing].
+* Search time: Synonyms are applied when a search is executed. This is a more flexible approach, which doesn't require reindexing. If token filters are configured with `"updateable": true`, search analyzers can be {ref}/indices-reload-analyzers.html[reloaded] when you make changes to your synonyms.
+
+Synonyms sets created using the synonyms API can only be used at search time.
+
+You can specify the analyzer that contains your synonyms set as a {ref}/specify-analyzer.html#specify-search-analyzer[search time analyzer] or as an {ref}/specify-analyzer.html#specify-index-time-analyzer[index time analyzer].
+ +The following example adds `my_analyzer` as a search analyzer to the `title` field in an index mapping: + +[source,JSON] +---- + "mappings": { + "properties": { + "title": { + "type": "text", + "search_analyzer": "my_analyzer", + "updateable": true + } + } + } +---- diff --git a/serverless/pages/search-with-synonyms.mdx b/serverless/pages/elasticsearch/search-with-synonyms.mdx similarity index 100% rename from serverless/pages/search-with-synonyms.mdx rename to serverless/pages/elasticsearch/search-with-synonyms.mdx diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc new file mode 100644 index 0000000000..20717ed300 --- /dev/null +++ b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc @@ -0,0 +1,392 @@ +[[elasticsearch-reference-semantic-search-elser]] += "Tutorial: Semantic search with ELSER" + +:description: Perform semantic search using ELSER, an NLP model trained by Elastic. +:keywords: elasticsearch, elser, semantic search + +preview:[] + +Elastic Learned Sparse EncodeR - or ELSER - is an NLP model trained by Elastic +that enables you to perform semantic search by using sparse vector +representation. Instead of literal matching on search terms, semantic search +retrieves results based on the intent and the contextual meaning of a search +query. + +The instructions in this tutorial shows you how to use ELSER to perform semantic +search on your data. + +[NOTE] +==== +Only the first 512 extracted tokens per field are considered during +semantic search with ELSER. Refer to +{ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[this page] for more +information. +==== + +[discrete] +[[requirements]] +== Requirements + +To perform semantic search by using ELSER, you must have the NLP model deployed +in your cluster. 
Refer to the
+{ml-docs}/ml-nlp-elser.html[ELSER documentation] to learn how to download and
+deploy the model.
+
+[NOTE]
+====
+The minimum dedicated ML node size for deploying and using the ELSER model
+is 4 GB in Elasticsearch Service if
+{cloud}/ec-autoscaling.html[deployment autoscaling] is turned off. Turning on
+autoscaling is recommended because it allows your deployment to dynamically
+adjust resources based on demand. Better performance can be achieved by using
+more allocations or more threads per allocation, which requires bigger ML nodes.
+Autoscaling provides bigger nodes when required. If autoscaling is turned off,
+you must provide suitably sized nodes yourself.
+====
+
+[discrete]
+[[elser-mappings]]
+== Create the index mapping
+
+First, the mapping of the destination index - the index that contains the tokens
+that the model created based on your text - must be created. The destination
+index must have a field with the
+{ref}/sparse-vector.html[`sparse_vector`] or {ref}/rank-features.html[`rank_features`] field
+type to index the ELSER output.
+
+[NOTE]
+====
+ELSER output must be ingested into a field with the `sparse_vector` or
+`rank_features` field type. Otherwise, {es} interprets the token-weight pairs as
+a massive amount of fields in a document. If you get an error similar to this
+`"Limit of total fields [1000] has been exceeded while adding new fields"` then
+the ELSER output field is not mapped properly and it has a field type different
+than `sparse_vector` or `rank_features`.
+====
+
+[source,bash]
+----
+curl -X PUT "${ES_URL}/my-index" \
+-H "Authorization: ApiKey ${API_KEY}" \
+-H "Content-Type: application/json" \
+-d'
+{
+  "mappings": {
+    "properties": {
+      "content_embedding": { <1>
+        "type": "sparse_vector" <2>
+      },
+      "content": { <3>
+        "type": "text" <4>
+      }
+    }
+  }
+}
+'
+----
+
+<1> The name of the field to contain the generated tokens. It must be referenced
+in the {infer} pipeline configuration in the next step.
+
+<2> The field to contain the tokens is a `sparse_vector` field.
+
+<3> The name of the field from which to create the sparse vector representation.
+In this example, the name of the field is `content`. It must be referenced in the
+{infer} pipeline configuration in the next step.
+
+<4> The field type which is text in this example.
+
+To learn how to optimize space, refer to the <<save-space>> section.
+
+[discrete]
+[[inference-ingest-pipeline]]
+== Create an ingest pipeline with an inference processor
+
+Create an {ref}/ingest.html[ingest pipeline] with an
+{ref}/inference-processor.html[inference processor] to use ELSER to infer against the data
+that is being ingested in the pipeline.
+
+[source,bash]
+----
+curl -X PUT "${ES_URL}/_ingest/pipeline/elser-v2-test" \
+-H "Authorization: ApiKey ${API_KEY}" \
+-H "Content-Type: application/json" \
+-d'
+{
+  "processors": [
+    {
+      "inference": {
+        "model_id": ".elser_model_2",
+        "input_output": [ <1>
+          {
+            "input_field": "content",
+            "output_field": "content_embedding"
+          }
+        ]
+      }
+    }
+  ]
+}
+'
+----
+
+[discrete]
+[[load-data]]
+== Load data
+
+In this step, you load the data that you later use in the {infer} ingest
+pipeline to extract tokens from it.
+
+Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS
+MARCO Passage Ranking data set. It consists of 200 queries, each accompanied by
+a list of relevant text passages. All unique passages, along with their IDs,
+have been extracted from that data set and compiled into a
+https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file].
+
+Download the file and upload it to your cluster using the
+{kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer]
+in the {ml-app} UI. Assign the name `id` to the first column and `content` to
+the second column. The index name is `test-data`. Once the upload is complete,
+you can see an index named `test-data` with 182469 documents.
+
+[discrete]
+[[reindexing-data-elser]]
+== Ingest the data through the {infer} ingest pipeline
+
+Create the tokens from the text by reindexing the data through the {infer}
+pipeline that uses ELSER as the inference model.
+
+[source,bash]
+----
+curl -X POST "${ES_URL}/_reindex?wait_for_completion=false" \
+-H "Authorization: ApiKey ${API_KEY}" \
+-H "Content-Type: application/json" \
+-d'
+{
+  "source": {
+    "index": "test-data",
+    "size": 50 <1>
+  },
+  "dest": {
+    "index": "my-index",
+    "pipeline": "elser-v2-test"
+  }
+}
+'
+----
+
+<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller
+number makes the update of the reindexing process quicker which enables you to
+follow the progress closely and detect errors early.
+
+The call returns a task ID to monitor the progress:
+
+[source,bash]
+----
+curl -X GET "${ES_URL}/_tasks/<task_id>" \
+-H "Authorization: ApiKey ${API_KEY}"
+----
+
+You can also open the Trained Models UI, select the Pipelines tab under ELSER to
+follow the progress.
+
+[discrete]
+[[sparse-vector-query]]
+== Semantic search by using the `sparse_vector` query
+
+To perform semantic search, use the `sparse_vector` query, and provide the
+query text and the inference ID associated with the ELSER model service. The example below uses the query text "How to
+avoid muscle soreness after running?"; the `content_embedding` field contains
+the generated ELSER output:
+
+[source,bash]
+----
+curl -X GET "${ES_URL}/my-index/_search" \
+-H "Authorization: ApiKey ${API_KEY}" \
+-H "Content-Type: application/json" \
+-d'
+{
+  "query":{
+    "sparse_vector":{
+      "field": "content_embedding",
+      "inference_id": "my-elser-endpoint",
+      "query": "How to avoid muscle soreness after running?"
+    }
+  }
+}
+'
+----
+
+The result is the top 10 documents that are closest in meaning to your query
+text from the `my-index` index sorted by their relevancy.
The result also
+contains the extracted tokens for each of the relevant search results with their
+weights.
+
+[source,console-result]
+----
+"hits": {
+  "total": {
+    "value": 10000,
+    "relation": "gte"
+  },
+  "max_score": 26.199875,
+  "hits": [
+    {
+      "_index": "my-index",
+      "_id": "FPr9HYsBag9jXmT8lEpI",
+      "_score": 26.199875,
+      "_source": {
+        "content_embedding": {
+          "muscular": 0.2821541,
+          "bleeding": 0.37929374,
+          "foods": 1.1718726,
+          "delayed": 1.2112266,
+          "cure": 0.6848574,
+          "during": 0.5886185,
+          "fighting": 0.35022718,
+          "rid": 0.2752442,
+          "soon": 0.2967024,
+          "leg": 0.37649947,
+          "preparation": 0.32974035,
+          "advance": 0.09652356,
+          (...)
+        },
+        "id": 1713868,
+        "model_id": ".elser_model_2",
+        "content": "For example, if you go for a run, you will mostly use the muscles in your lower body. Give yourself 2 days to rest those muscles so they have a chance to heal before you exercise them again. Not giving your muscles enough time to rest can cause muscle damage, rather than muscle development."
+      }
+    },
+    (...)
+  ]
+}
+----
+
+[discrete]
+[[sparse-vector-compound-query]]
+== Combining semantic search with other queries
+
+You can combine `sparse_vector` with other queries in a
+{ref}/compound-queries.html[compound query]. For example using a filter clause in a
+{ref}/query-dsl-bool-query.html[Boolean query] or a full text query which may or may not use the same
+query text as the `sparse_vector` query. This enables you to combine the search
+results from both queries.
+
+The search hits from the `sparse_vector` query tend to score higher than other
+{es} queries. Those scores can be regularized by increasing or decreasing the
+relevance scores of each query by using the `boost` parameter. Recall on the
+`sparse_vector` query can be high where there is a long tail of less relevant
+results. Use the `min_score` parameter to prune those less relevant documents.
+
+[source,bash]
+----
+curl -X GET "${ES_URL}/my-index/_search" \
+-H "Authorization: ApiKey ${API_KEY}" \
+-H "Content-Type: application/json" \
+-d'
+{
+  "query": {
+    "bool": { <1>
+      "should": [
+        {
+          "sparse_vector": {
+            "field": "content_embedding",
+            "query": "How to avoid muscle soreness after running?",
+            "inference_id": "my-elser-endpoint",
+            "boost": 1 <2>
+          }
+        },
+        {
+          "query_string": {
+            "query": "toxins",
+            "boost": 4 <3>
+          }
+        }
+      ]
+    }
+  },
+  "min_score": 10 <4>
+}
+'
+----
+
+<1> Both the `sparse_vector` and the `query_string` queries are in a `should`
+clause of a `bool` query.
+
+<2> The `boost` value is `1` for the `sparse_vector` query which is the default
+value. This means that the relevance scores of the results of this query are not
+boosted.
+
+<3> The `boost` value is `4` for the `query_string` query. The relevance score
+of the results of this query is increased causing them to rank higher in the
+search results.
+
+<4> Only the results with a score equal to or higher than `10` are displayed.
+
+[discrete]
+[[optimization]]
+== Optimizing performance
+
+[discrete]
+[[save-space]]
+=== Saving disk space by excluding the ELSER tokens from document source
+
+The tokens generated by ELSER must be indexed for use in the
+{ref}/query-dsl-sparse-vector-query.html[sparse_vector query]. However, it is not
+necessary to retain those terms in the document source. You can save disk space
+by using the {ref}/mapping-source-field.html#include-exclude[source exclude] mapping to remove the ELSER
+terms from the document source.
+
+[WARNING]
+====
+Reindex uses the document source to populate the destination index.
+Once the ELSER terms have been excluded from the source, they cannot be
+recovered through reindexing. Excluding the tokens from the source is a
+space-saving optimization that should only be applied if you are certain that
+reindexing will not be required in the future!
It's important to carefully +consider this trade-off and make sure that excluding the ELSER terms from the +source aligns with your specific requirements and use case. +==== + +The mapping that excludes `content_embedding` from the `_source` field can be +created by the following API call: + +[source,bash] +---- +curl -X PUT "${ES_URL}/my-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "mappings": { + "_source": { + "excludes": [ + "content_embedding" + ] + }, + "properties": { + "content_embedding": { + "type": "sparse_vector" + }, + "content": { + "type": "text" + } + } + } +} +' +---- + +[discrete] +[[further-reading]] +== Further reading + +* {ml-docs}/ml-nlp-elser.html[How to download and deploy ELSER] +* {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[ELSER limitation] +* https://www.elastic.co/blog/may-2023-launch-information-retrieval-elasticsearch-ai-model[Improving information retrieval in the Elastic Stack: Introducing Elastic Learned Sparse Encoder, our new retrieval model] + +[discrete] +[[interactive-example]] +== Interactive example + +* The `elasticsearch-labs` repo has an interactive example of running https://github.com/elastic/elasticsearch-labs/blob/main/notebooks/search/03-ELSER.ipynb[ELSER-powered semantic search] using the {es} Python client. 
diff --git a/serverless/pages/search-your-data-semantic-search-elser.mdx b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx similarity index 100% rename from serverless/pages/search-your-data-semantic-search-elser.mdx rename to serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc b/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc new file mode 100644 index 0000000000..d64f9fa3fa --- /dev/null +++ b/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc @@ -0,0 +1,143 @@ +[[elasticsearch-reference-semantic-search]] += Semantic search + +:description: Find data based on the intent and contextual meaning of a search query with semantic search +:keywords: elasticsearch, elser, semantic search + +preview:[] + +Semantic search is a search method that helps you find data based on the intent +and contextual meaning of a search query, instead of a match on query terms +(lexical search). + +{es} provides semantic search capabilities using {ml-docs}/ml-nlp.html[natural +language processing (NLP)] and vector search. Deploying an NLP model to {es} +enables it to extract text embeddings out of text. Embeddings are vectors that +provide a numeric representation of a text. Pieces of content with similar +meaning have similar representations. + +_A simplified representation of encoding textual concepts as vectors_ + +At query time, {es} can use the same NLP model to convert a query into +embeddings, enabling you to find documents with similar text embeddings. + +This guide shows you how to implement semantic search with {es}, from selecting +an NLP model, to writing queries. 
+ +[discrete] +[[semantic-search-select-nlp-model]] +== Select an NLP model + +{es} offers the usage of a +{ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-embedding[wide range of NLP models], +including both dense and sparse vector models. Your choice of the language model +is critical for implementing semantic search successfully. + +While it is possible to bring your own text embedding model, achieving good +search results through model tuning is challenging. Selecting an appropriate +model from our third-party model list is the first step. Training the model on +your own data is essential to ensure better search results than using only BM25. +However, the model training process requires a team of data scientists and ML +experts, making it expensive and time-consuming. + +To address this issue, Elastic provides a pre-trained representational model +called {ml-docs}/ml-nlp-elser.html[Elastic Learned Sparse EncodeR (ELSER)]. +ELSER, currently available only for English, is an out-of-domain sparse vector +model that does not require fine-tuning. This adaptability makes it suitable for +various NLP use cases out of the box. Unless you have a team of ML specialists, +it is highly recommended to use the ELSER model. + +In the case of sparse vector representation, the vectors mostly consist of zero +values, with only a small subset containing non-zero values. This representation +is commonly used for textual data. In the case of ELSER, each document in an +index and the query text itself are represented by high-dimensional sparse +vectors. Each non-zero element of the vector corresponds to a term in the model +vocabulary. The ELSER vocabulary contains around 30000 terms, so the sparse +vectors created by ELSER contain about 30000 values, the majority of which are +zero. 
Effectively the ELSER model is replacing the terms in the original query +with other terms that have been learnt to exist in the documents that best match +the original search terms in a training dataset, and weights to control how +important each is. + +[discrete] +[[semantic-search-deploy-nlp-model]] +== Deploy the model + +After you decide which model you want to use for implementing semantic search, +you need to deploy the model in {es}. + +include::../../partials/deploy-nlp-model-widget.asciidoc[] + +[discrete] +[[semantic-search-field-mappings]] +== Map a field for the text embeddings + +Before you start using the deployed model to generate embeddings based on your +input text, you need to prepare your index mapping first. The mapping of the +index depends on the type of model. + +include::../../partials/field-mappings-widget.asciidoc[] + +[discrete] +[[semantic-search-generate-embeddings]] +== Generate text embeddings + +Once you have created the mappings for the index, you can generate text +embeddings from your input text. This can be done by using an +{ref}/ingest.html[ingest pipeline] with an {ref}/inference-processor.html[inference processor]. +The ingest pipeline processes the input data and indexes it into the destination +index. At index time, the inference ingest processor uses the trained model to +infer against the data ingested through the pipeline. After you created the +ingest pipeline with the inference processor, you can ingest your data through +it to generate the model output. + +include::../../partials/generate-embeddings-widget.asciidoc[] + +Now it is time to perform semantic search! + +[discrete] +[[semantic-search-search]] +== Search the data + +Depending on the type of model you have deployed, you can query sparse vectors +with a sparse vector query, or dense vectors with a kNN search. 
+ +include::../../partials/search-widget.asciidoc[] + +[discrete] +[[semantic-search-hybrid-search]] +== Beyond semantic search with hybrid search + +In some situations, lexical search may perform better than semantic search. For +example, when searching for single words or IDs, like product numbers. + +Combining semantic and lexical search into one hybrid search request using +{ref}/rrf.html[reciprocal rank fusion] provides the best of both worlds. Not only that, +but hybrid search using reciprocal rank fusion {blog-ref}improving-information-retrieval-elastic-stack-hybrid[has been shown to perform better +in general]. + +include::../../partials/hybrid-search-widget.asciidoc[] + +[discrete] +[[semantic-search-read-more]] +== Read more + +* Tutorials: ++ +** <> +** {ml-docs}/ml-nlp-text-emb-vector-search-example.html[Semantic search with the msmarco-MiniLM-L-12-v3 sentence-transformer model] +* Blogs: ++ +** {blog-ref}may-2023-launch-sparse-encoder-ai-model[Introducing Elastic Learned Sparse Encoder: Elastic's AI model for semantic search] +** {blog-ref}lexical-ai-powered-search-elastic-vector-database[How to get the best of lexical and AI-powered search with Elastic's vector database] +** Information retrieval blog series: ++ +*** {blog-ref}improving-information-retrieval-elastic-stack-search-relevance[Part 1: Steps to improve search relevance] +*** {blog-ref}improving-information-retrieval-elastic-stack-benchmarking-passage-retrieval[Part 2: Benchmarking passage retrieval] +*** {blog-ref}may-2023-launch-information-retrieval-elasticsearch-ai-model[Part 3: Introducing Elastic Learned Sparse Encoder, our new retrieval model] +*** {blog-ref}improving-information-retrieval-elastic-stack-hybrid[Part 4: Hybrid retrieval] +* Interactive examples: ++ +** The https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs`] repo contains a number of interactive semantic search examples in the form of executable Python notebooks, using the {es} Python client. 
+ +// The include that was here is another page diff --git a/serverless/pages/search-your-data-semantic-search.mdx b/serverless/pages/elasticsearch/search-your-data-semantic-search.mdx similarity index 100% rename from serverless/pages/search-your-data-semantic-search.mdx rename to serverless/pages/elasticsearch/search-your-data-semantic-search.mdx diff --git a/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc b/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc new file mode 100644 index 0000000000..af56be645c --- /dev/null +++ b/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc @@ -0,0 +1,22 @@ +[[search-your-data-the-search-api]] += The search API + +:description: Run queries and aggregations with the search API. +:keywords: serverless, elasticsearch, API + +preview:[] + +A _search_ consists of one or more queries that are combined and sent to {es}. +Documents that match a search's queries are returned in the _hits_, or +_search results_, of the response. + +A search may also contain additional information used to better process its +queries. For example, a search may be limited to a specific index or only return +a specific number of results. + +You can use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/group/endpoint-search[search API] to search and +aggregate data stored in {es} data streams or indices. +The API's `query` request body parameter accepts queries written in +{ref}/query-dsl.html[Query DSL]. + +For more information, refer to {ref}/search-your-data.html[the search API overview] in the classic {es} docs. 
diff --git a/serverless/pages/search-your-data-the-search-api.mdx b/serverless/pages/elasticsearch/search-your-data-the-search-api.mdx similarity index 100% rename from serverless/pages/search-your-data-the-search-api.mdx rename to serverless/pages/elasticsearch/search-your-data-the-search-api.mdx diff --git a/serverless/pages/elasticsearch/search-your-data.asciidoc b/serverless/pages/elasticsearch/search-your-data.asciidoc new file mode 100644 index 0000000000..ad422f99cd --- /dev/null +++ b/serverless/pages/elasticsearch/search-your-data.asciidoc @@ -0,0 +1,28 @@ +[[search-your-data]] += Search your data + +:description: Use the search API to run queries on your data. +:keywords: serverless, elasticsearch, search + +preview:[] + +A search query, or query, is a request for information about data in {es} data streams or indices. + +You can think of a query as a question, written in a way {es} understands. Depending on your data, you can use a query to get answers to questions like: + +* What processes on my server take longer than 500 milliseconds to respond? +* What users on my network ran regsvr32.exe within the last week? +* What pages on my website contain a specific word or phrase? + +You run search queries using the <>. The API supports several query types and search methods: + +**Search for exact values.** +Use {ref}/term-level-queries.html[term-level queries] to filter numbers, dates, IPs, or strings based on exact values or ranges. + +**Full-text search.** +Use {ref}/full-text-queries.html[full-text queries] to query {ref}/analysis.html#analysis[unstructured text] and find documents that best match query terms. Use <> to search for words or phrases that have the same or similar meaning. + +**Vector search.** +Store dense vectors in {es} and use <> to find similar vectors. + +You can also use Elastic's natural language processing (NLP) model to encode text as sparse or dense vectors. 
Then use <> to find data based on the intent and contextual meaning rather than matching keywords. diff --git a/serverless/pages/search-your-data.mdx b/serverless/pages/elasticsearch/search-your-data.mdx similarity index 100% rename from serverless/pages/search-your-data.mdx rename to serverless/pages/elasticsearch/search-your-data.mdx diff --git a/serverless/pages/elasticsearch/serverless-differences.asciidoc b/serverless/pages/elasticsearch/serverless-differences.asciidoc new file mode 100644 index 0000000000..7161d8d627 --- /dev/null +++ b/serverless/pages/elasticsearch/serverless-differences.asciidoc @@ -0,0 +1,42 @@ +[[differences]] += Differences from other Elasticsearch offerings + +:description: Understand how serverless Elasticsearch differs from Elastic Cloud Hosted and self-managed offerings. +:keywords: serverless, elasticsearch + +++++ +Serverless differences +++++ + +preview:[] + +Some features that are available in Elastic Cloud Hosted and self-managed offerings are not available in serverless {es}. +These features have either been replaced by a new feature, or are not applicable in the new Serverless architecture: + +* **Index lifecycle management ({ilm-init})** is not available, in favor of **https://www.elastic.co/docs/current/serverless/index-management[data stream lifecycle]**. ++ +In an Elastic Cloud Hosted or self-managed environment, {ilm-init} lets you automatically transition indices through data tiers according to your +performance needs and retention requirements. This allows you to balance hardware costs with performance. Serverless Elasticsearch eliminates this +complexity by optimizing your cluster performance for you. ++ +Data stream lifecycle is an optimized lifecycle tool that lets you focus on the most common lifecycle management needs, without unnecessary +hardware-centric concepts like data tiers. ++ +* **Watcher** is not available, in favor of **<>**.
++ +Kibana Alerts allows rich integrations across use cases like APM, metrics, security, and uptime. Prepackaged rule types simplify setup and +hide the details of complex, domain-specific detections, while providing a consistent interface across Kibana. ++ +* Certain APIs, API parameters, index, cluster and node level settings are not available. Refer to our +<> for a list of available APIs. ++ +Serverless Elasticsearch manages the underlying Elastic cluster for you, optimizing nodes, shards, and replicas for your use case. +Because of this, various management and monitoring APIs, API parameters and settings are not available on Serverless. ++ +* {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Scripted metric aggregations] are not available. + +.Other limitations +[IMPORTANT] +==== +For serverless technical preview limitations, refer to <>. +==== diff --git a/serverless/pages/serverless-differences.mdx b/serverless/pages/elasticsearch/serverless-differences.mdx similarity index 100% rename from serverless/pages/serverless-differences.mdx rename to serverless/pages/elasticsearch/serverless-differences.mdx diff --git a/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc b/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc new file mode 100644 index 0000000000..1d4cb6e165 --- /dev/null +++ b/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc @@ -0,0 +1,25 @@ +[[technical-preview-limitations]] += Technical preview limitations + +:description: Review the limitations that apply to Elasticsearch projects. +:keywords: serverless, elasticsearch + +preview:[] + +The following are currently not available: + +* Custom plugins and custom bundles +* Reindexing from remote clusters +* Cross-cluster search and cross-cluster replication +* Snapshot and restore +* Clone index API +* Migrations from non-serverless {es} deployments. In the interim, you can <> to move data to and from serverless projects. 
+* Custom roles +* Audit logging +* Elasticsearch for Apache Hadoop + +Currently, workloads outside of the following ranges may experience higher latencies (greater than sub-second): + +* Search queries on indices greater than 150GB +* Index queries per second (QPS) greater than 1000 +* Search queries per second (QPS) greater than 1800 diff --git a/serverless/pages/technical-preview-limitations.mdx b/serverless/pages/elasticsearch/technical-preview-limitations.mdx similarity index 100% rename from serverless/pages/technical-preview-limitations.mdx rename to serverless/pages/elasticsearch/technical-preview-limitations.mdx diff --git a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc new file mode 100644 index 0000000000..a950da8126 --- /dev/null +++ b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc @@ -0,0 +1,63 @@ +:description: Build search solutions and applications with {es}. +:keywords: serverless, elasticsearch, overview + +preview:[] + + + +.Understanding Elasticsearch on serverless +[IMPORTANT] +==== +Refer to <> and <> for important details, including features and limitations specific to {es} on serverless. +==== + + + + diff --git a/serverless/pages/what-is-elasticsearch-serverless.mdx b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.mdx similarity index 100% rename from serverless/pages/what-is-elasticsearch-serverless.mdx rename to serverless/pages/elasticsearch/what-is-elasticsearch-serverless.mdx diff --git a/serverless/pages/general/cloud-regions.asciidoc b/serverless/pages/general/cloud-regions.asciidoc new file mode 100644 index 0000000000..5850b022e4 --- /dev/null +++ b/serverless/pages/general/cloud-regions.asciidoc @@ -0,0 +1,36 @@ +[[-serverless-regions]] += Serverless regions + +:description: Index, search, and manage {es} data in your preferred language. 
+:keywords: serverless, regions, aws, cloud + +A region is the geographic area where the data center of the cloud provider that hosts your project is located. Review the available Elastic Cloud Serverless regions to decide which region to use. If you aren't sure which region to pick, choose one that is geographically close to you to reduce latency. + +Elastic Cloud Serverless handles all hosting details for you. You are unable to change the region after you create a project. + +[NOTE] +==== +Currently, a limited number of Amazon Web Services (AWS) regions are available. More regions for AWS, as well as Microsoft Azure and Google Cloud Platform (GCP), will be added in the future. +==== + +[discrete] +[[-serverless-regions-amazon-web-services-aws-regions]] +== Amazon Web Services (AWS) regions + +The following AWS regions are currently available: + +|=== +| Region| Name + +| ap-southeast-1 +| Asia Pacific (Singapore) + +| eu-west-1 +| Europe (Ireland) + +| us-east-1 +| US East (N. Virginia) + +| us-west-2 +| US West (Oregon) +|=== diff --git a/serverless/pages/cloud-regions.mdx b/serverless/pages/general/cloud-regions.mdx similarity index 100% rename from serverless/pages/cloud-regions.mdx rename to serverless/pages/general/cloud-regions.mdx diff --git a/serverless/pages/general/index.asciidoc b/serverless/pages/general/index.asciidoc new file mode 100644 index 0000000000..4036873e17 --- /dev/null +++ b/serverless/pages/general/index.asciidoc @@ -0,0 +1,31 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + += Welcome to Elastic serverless + +include::./what-is-serverless.asciidoc[leveloffset=+1] + +include::./sign-up.asciidoc[leveloffset=+1] + +include::./manage-org.asciidoc[leveloffset=+1] +include::./manage-access-to-org.asciidoc[leveloffset=+2] +include::./manage-access-to-org-user-roles.asciidoc[leveloffset=+2] 
+include::./manage-access-to-org-from-existing-account.asciidoc[leveloffset=+2] + +include::undefined[leveloffset=+1] +include::./manage-your-project-rest-api.asciidoc[leveloffset=+2] + +include::./manage-billing.asciidoc[leveloffset=+1] +include::./manage-billing-check-subscription.asciidoc[leveloffset=+2] +include::./manage-billing-monitor-usage.asciidoc[leveloffset=+2] +include::./manage-billing-history.asciidoc[leveloffset=+2] +include::./manage-billing-pricing-model.asciidoc[leveloffset=+2] +include::./manage-billing-stop-project.asciidoc[leveloffset=+2] + +include::./service-status.asciidoc[leveloffset=+1] + +include::./user-profile.asciidoc[leveloffset=+1] + +include::undefined[leveloffset=+1] diff --git a/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc b/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc new file mode 100644 index 0000000000..3917d19188 --- /dev/null +++ b/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc @@ -0,0 +1,17 @@ +[[join-organization-from-existing-cloud-account]] += Join an organization from an existing Elastic Cloud account + +:description: Join a new organization and bring over your projects. +:keywords: serverless, general, organization, join, how to + +preview:[] + +If you already belong to an organization, and you want to join a new one, it is currently not possible to bring your projects over to the new organization. + +If you want to join a new project, follow these steps: + +. Make sure you do not have active projects before you leave your current organization. +. Delete your projects and clear any bills. +. Leave your current organization. +. Ask the administrator to invite you to the organization you want to join. +. Accept the invitation that you will get by email. 
diff --git a/serverless/pages/manage-access-to-org-from-existing-account.mdx b/serverless/pages/general/manage-access-to-org-from-existing-account.mdx similarity index 100% rename from serverless/pages/manage-access-to-org-from-existing-account.mdx rename to serverless/pages/general/manage-access-to-org-from-existing-account.mdx diff --git a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc new file mode 100644 index 0000000000..e65d0ae8ee --- /dev/null +++ b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc @@ -0,0 +1,76 @@ +[[assign-user-roles]] += Assign user roles and privileges + +:description: Manage the predefined set of roles and privileges for all your projects. +:keywords: serverless, general, organization, roles, how to + +preview:[] + +Within an organization, users can have one or more roles and each role grants specific privileges. + +You must assign user roles when you <>. +To subsequently edit the roles assigned to a user: + +. Go to the user icon on the header bar and select **Organization**. +. Find the user on the **Members** tab of the **Organization** page. Click the member name to view and edit its roles. + +[discrete] +[[assign-user-roles-organization-level-roles]] +== Organization-level roles + +* **Organization owner**. Can manage all roles under the organization and has full access to all serverless projects, organization-level details, billing details, and subscription levels. This role is assigned by default to the person who created the organization. +* **Billing admin**. Has access to all invoices and payment methods. Can make subscription changes. + +[discrete] +[[assign-user-roles-instance-access-roles]] +== Instance access roles + +Each serverless project type has a set of predefined roles that you can assign to your organization members. 
+You can assign the predefined roles: + +* globally, for all projects of the same type ({es-serverless}, {observability}, or {security}). In this case, the role will also apply to new projects created later. +* individually, for specific projects only. To do that, you have to set the **Role for all** field of that specific project type to **None**. + +For example, you can assign a user the developer role for a specific {es-serverless} project: + +[role="screenshot"] +image::images/individual-role.png[Individual role] + +ifeval::["{serverlessCustomRoles}" == "true"] +You can also optionally https://www.elastic.co/docs/current/serverless/custom-roles[create custom roles in a project]. +To assign a custom role to users, go to "Instance access roles" and select it from the list under the specific project it was created in. +endif::[] + +[discrete] +[[assign-user-roles-es]] +=== {es} + +* **Admin**. Has full access to project management, properties, and security privileges. Admins log into projects with superuser role privileges. +* **Developer**. Creates API keys, indices, data streams, adds connectors, and builds visualizations. +* **Viewer**. Has read-only access to project details, data, and features. + +[discrete] +[[assign-user-roles-observability]] +=== {observability} + +* **Admin**. Has full access to project management, properties, and security privileges. Admins log into projects with superuser role privileges. +* **Editor**. Configures all Observability projects. Has read-only access to data indices. Has full access to all project features. +* **Viewer**. Has read-only access to project details, data, and features. + +[discrete] +[[assign-user-roles-security]] +=== {security} + +* **Admin**. Has full access to project management, properties, and security privileges. Admins log into projects with superuser role privileges. +* **Editor**. Configures all Security projects. Has read-only access to data indices. Has full access to all project features.
+* **Viewer**. Has read-only access to project details, data, and features. +* **Tier 1 analyst**. Ideal for initial alert triage. General read access, can create dashboards and visualizations. +* **Tier 2 analyst**. Ideal for alert triage and beginning the investigation process. Can create cases. +* **Tier 3 analyst**. Deeper investigation capabilities. Access to rules, lists, cases, Osquery, and response actions. +* **Threat intelligence analyst**. Access to alerts, investigation tools, and intelligence pages. +* **Rule author**. Access to detection engineering and rule creation. Can create rules from available data sources and add exceptions to reduce false positives. +* **SOC manager**. Access to alerts, cases, investigation tools, endpoint policy management, and response actions. +* **Endpoint operations analyst**. Access to endpoint response actions. Can manage endpoint policies, {fleet}, and integrations. +* **Platform engineer**. Access to {fleet}, integrations, endpoints, and detection content. +* **Detections admin**. All available detection engine permissions to include creating rule actions, such as notifications to third-party systems. +* **Endpoint policy manager**. Access to endpoint policy management and related artifacts. Can manage {fleet} and integrations. diff --git a/serverless/pages/manage-access-to-org-user-roles.mdx b/serverless/pages/general/manage-access-to-org-user-roles.mdx similarity index 100% rename from serverless/pages/manage-access-to-org-user-roles.mdx rename to serverless/pages/general/manage-access-to-org-user-roles.mdx diff --git a/serverless/pages/general/manage-access-to-org.asciidoc b/serverless/pages/general/manage-access-to-org.asciidoc new file mode 100644 index 0000000000..7c1b8354bb --- /dev/null +++ b/serverless/pages/general/manage-access-to-org.asciidoc @@ -0,0 +1,32 @@ +[[manage-access-to-organization]] += Invite your team + +:description: Add members to your organization and projects. 
+:keywords: serverless, general, organization, overview + +To allow other users to interact with your projects, you must invite them to join your organization and grant them access to your organization resources and instances. + +Alternatively, {cloud}/ec-saml-sso.html[configure {ecloud} SAML SSO] to enable your organization members to join the {ecloud} organization automatically. preview:[] + +. Go to the user icon on the header bar and select **Organization**. +. Click **Invite members**. ++ +You can add multiple members by entering their email addresses separated by a space. ++ +You can grant access to all projects of the same type with a unique role, or select individual roles for specific projects. +For more details about roles, refer to <>. +. Click **Send invites**. ++ +Invitations to join an organization are sent by email. Invited users have 72 hours to accept the invitation. If they do not join within that period, you will have to send a new invitation. + +On the **Members** tab of the **Organization** page, you can view the list of current members, their status and role. + +In the **Actions** column, click the three dots to edit a member’s role or revoke the invite. + +[discrete] +[[manage-access-to-organization-leave-an-organization]] +== Leave an organization + +On the **Organization** page, click **Leave organization**. + +If you're the only user in the organization, you can only leave if you have deleted all your projects and don't have any pending bills. 
diff --git a/serverless/pages/manage-access-to-org.mdx b/serverless/pages/general/manage-access-to-org.mdx similarity index 100% rename from serverless/pages/manage-access-to-org.mdx rename to serverless/pages/general/manage-access-to-org.mdx diff --git a/serverless/pages/general/manage-billing-check-subscription.asciidoc b/serverless/pages/general/manage-billing-check-subscription.asciidoc new file mode 100644 index 0000000000..5c03d2c190 --- /dev/null +++ b/serverless/pages/general/manage-billing-check-subscription.asciidoc @@ -0,0 +1,18 @@ +[[check-subscription]] += Check your subscription + +:description: Manage your account details and subscription level. +:keywords: serverless, general, billing, subscription + +preview:[] + +To find more details about your subscription: + +. Navigate to https://cloud.elastic.co/[cloud.elastic.co] and log in to your Elastic Cloud account. +. Go to the user icon on the header bar and select **Billing**. + +On the **Overview** page you can: + +* Update your subscription level +* Check the date when your next bill will be issued and update the payment method +* Check your account details and add Elastic Consumption Units (ECU) credits diff --git a/serverless/pages/manage-billing-check-subscription.mdx b/serverless/pages/general/manage-billing-check-subscription.mdx similarity index 100% rename from serverless/pages/manage-billing-check-subscription.mdx rename to serverless/pages/general/manage-billing-check-subscription.mdx diff --git a/serverless/pages/general/manage-billing-history.asciidoc b/serverless/pages/general/manage-billing-history.asciidoc new file mode 100644 index 0000000000..db216feff6 --- /dev/null +++ b/serverless/pages/general/manage-billing-history.asciidoc @@ -0,0 +1,15 @@ +[[billing-history]] += Check your billing history + +:description: Monitor payments and billing receipts. 
+:keywords: serverless, general, billing, history + +preview:[] + +Information about outstanding payments and billing receipts is available from the {ess-console}[{ess-console-name}]. + +To check your billing history: + +. Log in to the {ess-console}[{ess-console-name}]. +. Select the user icon on the header bar and choose **Billing** from the menu. +. Under the **History** tab, select the invoice number for a detailed PDF. diff --git a/serverless/pages/manage-billing-history.mdx b/serverless/pages/general/manage-billing-history.mdx similarity index 100% rename from serverless/pages/manage-billing-history.mdx rename to serverless/pages/general/manage-billing-history.mdx diff --git a/serverless/pages/general/manage-billing-monitor-usage.asciidoc b/serverless/pages/general/manage-billing-monitor-usage.asciidoc new file mode 100644 index 0000000000..a804c10086 --- /dev/null +++ b/serverless/pages/general/manage-billing-monitor-usage.asciidoc @@ -0,0 +1,28 @@ +[[monitor-usage]] += Monitor your account usage + +:description: Check the usage breakdown of your account. +:keywords: serverless, general, billing, usage + +preview:[] + +To find more details about your account usage: + +. Navigate to https://cloud.elastic.co/[cloud.elastic.co] and log in to your {ecloud} account. +. Go to the user icon on the header bar and select **Billing**. + +On the **Usage** page you can: + +* Monitor the usage for the current month, including total hourly rate and month-to-date usage +* Check the usage breakdown for a selected time range + +[IMPORTANT] +==== +The usage breakdown information is an estimate. To get the exact amount you owe for a given month, check your invoices in the <>. +==== + +.Elasticsearch minimum runtime VCUs +[IMPORTANT] +==== +When you create an Elasticsearch Serverless project, a minimum number of VCUs are always allocated to your project to maintain basic ingest and search capabilities. These VCUs incur a minimum cost even with no active usage. 
Learn more about https://www.elastic.co/pricing/serverless-search#what-are-the-minimum-compute-resource-vcus-on-elasticsearch-serverless[minimum VCUs on Elasticsearch Serverless]. +==== diff --git a/serverless/pages/manage-billing-monitor-usage.mdx b/serverless/pages/general/manage-billing-monitor-usage.mdx similarity index 100% rename from serverless/pages/manage-billing-monitor-usage.mdx rename to serverless/pages/general/manage-billing-monitor-usage.mdx diff --git a/serverless/pages/general/manage-billing-pricing-model.asciidoc b/serverless/pages/general/manage-billing-pricing-model.asciidoc new file mode 100644 index 0000000000..7602c012cd --- /dev/null +++ b/serverless/pages/general/manage-billing-pricing-model.asciidoc @@ -0,0 +1,45 @@ +[[serverless-billing]] += Serverless billing dimensions + +:description: Understand how usage affects serverless pricing. +:keywords: serverless, general, billing, pricing model + +preview:[] + +Elastic Cloud serverless billing is based on your usage across these dimensions: + +* <> +* <> + +[discrete] +[[offerings]] +== Offerings + +To learn about billing dimensions for specific offerings, refer to: + +* https://www.elastic.co/docs/current/serverless/elasticsearch/elasticsearch-billing[] +* https://www.elastic.co/docs/current/serverless/observability/observability-billing[] +* https://www.elastic.co/docs/current/serverless/security/security-billing[] + +[discrete] +[[add-ons]] +== Add-ons + +[discrete] +[[serverless-billing-data-out]] +=== Data out + +_Data out_ accounts for all of the traffic coming out of a serverless project. +This includes search results, as well as monitoring data sent from the project. +The same rate applies regardless of the destination of the data, whether to the internet, +another region, or a cloud provider account in the same region. +Data coming out of the project through AWS PrivateLink, GCP Private Service Connect, +or Azure Private Link is also considered data out. 
+ +[discrete] +[[serverless-billing-support]] +=== Support + +If your subscription level is Standard, there is no separate charge for Support reflected on your bill. +If your subscription level is Gold, Platinum, or Enterprise, a charge is made for Support as a percentage (%) of the ECUs. +To find out more about our support levels, go to https://www.elastic.co/support[https://www.elastic.co/support]. diff --git a/serverless/pages/manage-billing-pricing-model.mdx b/serverless/pages/general/manage-billing-pricing-model.mdx similarity index 100% rename from serverless/pages/manage-billing-pricing-model.mdx rename to serverless/pages/general/manage-billing-pricing-model.mdx diff --git a/serverless/pages/general/manage-billing-stop-project.asciidoc b/serverless/pages/general/manage-billing-stop-project.asciidoc new file mode 100644 index 0000000000..4fd1b03847 --- /dev/null +++ b/serverless/pages/general/manage-billing-stop-project.asciidoc @@ -0,0 +1,17 @@ +[[billing-stop-project]] += Stop charges for a project + +:description: How to stop charges for a project. +:keywords: serverless, general, billing + +preview:[] + +Got a project you no longer need and don't want to be charged for? Simply delete it. + +WARNING: All data is lost. Billing for usage is by the hour and any outstanding charges for usage before you deleted the project will still appear on your next bill. + +To stop being charged for a project: + +. Log in to the {ess-console}[{ess-console-name}]. +. Find your project on the home page in the **Serverless Projects** card and select **Manage** to access it directly. Or, select **Serverless Projects** to go to the projects page to view all of your projects. +. Select **Actions**, then select **Delete project** and confirm the deletion. 
diff --git a/serverless/pages/manage-billing-stop-project.mdx b/serverless/pages/general/manage-billing-stop-project.mdx similarity index 100% rename from serverless/pages/manage-billing-stop-project.mdx rename to serverless/pages/general/manage-billing-stop-project.mdx diff --git a/serverless/pages/general/manage-billing.asciidoc b/serverless/pages/general/manage-billing.asciidoc new file mode 100644 index 0000000000..1fb6f06325 --- /dev/null +++ b/serverless/pages/general/manage-billing.asciidoc @@ -0,0 +1,32 @@ +[[manage-billing]] += Manage billing of your organization + +:description: Configure the billing details of your organization. +:keywords: serverless, general, billing, overview + +++++ +Manage billing +++++ + +preview:[] + +.Serverless billing starts June 1, 2024 +[IMPORTANT] +==== +Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the https://cloud.elastic.co/billing/usage?[Billing Usage page]. Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. +==== + +You can manage the billing details of your organization directly from the Elastic Cloud console. + +. Navigate to https://cloud.elastic.co/[cloud.elastic.co] and log in to your Elastic Cloud account. +. Go to the user icon on the header bar and select **Billing**. + +From the **Billing pages**, you can perform the following tasks: + +* <> +* <> +* <> + +If you have a project that you're no longer using, refer to <>. + +To learn about the serverless pricing model, refer to <> and our https://www.elastic.co/pricing/serverless-search[pricing page]. 
diff --git a/serverless/pages/manage-billing.mdx b/serverless/pages/general/manage-billing.mdx similarity index 100% rename from serverless/pages/manage-billing.mdx rename to serverless/pages/general/manage-billing.mdx diff --git a/serverless/pages/general/manage-org.asciidoc b/serverless/pages/general/manage-org.asciidoc new file mode 100644 index 0000000000..45e922b227 --- /dev/null +++ b/serverless/pages/general/manage-org.asciidoc @@ -0,0 +1,25 @@ +[[manage-organization]] += Manage your organization + +:description: Manage your instances, users, and settings. +:keywords: serverless, general, organization, overview + +preview:[] + +When you sign up to Elastic Cloud, you create an **organization**. + +This organization is the umbrella for all of your Elastic Cloud resources, users, and account settings. Every organization has a unique identifier. Bills are invoiced according to the billing contact and details that you set for your organization. + + diff --git a/serverless/pages/manage-org.mdx b/serverless/pages/general/manage-org.mdx similarity index 100% rename from serverless/pages/manage-org.mdx rename to serverless/pages/general/manage-org.mdx diff --git a/serverless/pages/general/manage-your-project-rest-api.asciidoc b/serverless/pages/general/manage-your-project-rest-api.asciidoc new file mode 100644 index 0000000000..8e6872ae58 --- /dev/null +++ b/serverless/pages/general/manage-your-project-rest-api.asciidoc @@ -0,0 +1,204 @@ +[[manage-project-with-api]] += Using the Project Management REST API + +:description: Manage your organization's serverless projects using the REST API. +:keywords: serverless, project, manage, rest, api + +preview:[] + +You can manage serverless projects using the https://www.elastic.co/docs/api/doc/elastic-cloud-serverless[Elastic Cloud Serverless REST API]. This API allows you to create, update, and delete projects, as well as manage project features and usage. 
+ +[TIP] +==== +More APIs let you interact with data, capabilities, and settings inside of specific projects. Refer to the https://www.elastic.co/docs/api[Serverless API reference page]. +==== + +[discrete] +[[manage-project-with-api-api-principles]] +== API Principles + +* The Elastic Cloud REST API is built following REST principles: ++ +** Resources (such as projects) are represented as URIs. +** Standard HTTP response codes and verbs are used (GET, POST, PUT, PATCH and DELETE). +** API calls are stateless. Every request that you make happens in isolation from other calls and must include all the information necessary to fulfill the request. +* JSON is the data interchange format. + +[discrete] +[[manage-project-with-api-authentication]] +== Authentication + +API keys are used to authenticate requests to the Elastic Cloud REST API. +Learn how to https://www.elastic.co/guide/en/cloud/current/ec-api-authentication.html[create API keys]. + +You must provide the API key for all API requests in the `Authorization` header as follows: + +[source,bash] +---- +"Authorization: ApiKey $API_KEY" +---- + +For example, if you interact with the API using the `curl` command: + +[source,bash] +---- +curl -H "Authorization: ApiKey essu_..." https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch +---- + +[discrete] +[[manage-project-with-api-open-api-specification]] +== Open API Specification + +The Project Management API is documented using the https://en.wikipedia.org/wiki/OpenAPI_Specification[OpenAPI Specification]. The current supported version of the specification is `3.0`. + +For details, check the https://www.elastic.co/docs/api/doc/elastic-cloud-serverless[API reference] or download the https://www.elastic.co/docs/api/doc/elastic-cloud-serverless.yaml[OpenAPI Specification]. + +This specification can be used to generate client SDKs, or on tools that support it, such as the https://editor.swagger.io[Swagger Editor]. 
+ +[discrete] +[[manage-project-with-api-examples]] +== Examples + +To try the examples in this section: + +. https://www.elastic.co/guide/en/cloud/current/ec-api-authentication.html[Create an API key]. +. Store the generated API key as an environment variable so that you don't need to specify it again for each request: ++ +[source,bash] +---- +export API_KEY="YOUR_GENERATED_API_KEY" +---- + +[discrete] +[[manage-project-with-api-create-a-serverless-elasticsearch-project]] +=== Create a serverless Elasticsearch project + +[source,bash] +---- +curl -H "Authorization: ApiKey $API_KEY" \ + -H "Content-Type: application/json" \ + "https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch" \ + -XPOST --data '{ + "name": "My project", <1> + "region_id": "aws-us-east-1" <2> + }' +---- + +<1> Replace **`My project`** with a more descriptive name in this call. + +<2> You can <>. + +The response from the create project request will include the created project details, such as the project ID, +the credentials to access the project, and the endpoints to access different apps such as Elasticsearch and Kibana. + +Example of `Create project` response: + +[source,json] +---- +{ + "id": "cace8e65457043698ed3d99da2f053f6", + "endpoints": { + "elasticsearch": "https://sample-project-c990cb.es.us-east-1.aws.elastic.cloud", + "kibana": "https://sample-project-c990cb-c990cb.kb.us-east-1.aws.elastic.cloud" + }, + "credentials": { + "username": "admin", + "password": "abcd12345" + } + (...) 
+} +---- + +You can store the project ID as an environment variable for the next requests: + +[source,bash] +---- +export PROJECT_ID=cace8e65457043698ed3d99da2f053f6 +---- + +[discrete] +[[manage-project-with-api-get-project]] +=== Get project + +You can retrieve your project details through an API call: + +[source,bash] +---- +curl -H "Authorization: ApiKey $API_KEY" \ + "https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch/${PROJECT_ID}" +---- + +[discrete] +[[manage-project-with-api-get-project-status]] +=== Get project status + +The 'status' endpoint indicates whether the project is initialized and ready to be used. In the response, the project's `phase` will change from "initializing" to "initialized" when it is ready: + +[source,bash] +---- +curl -H "Authorization: ApiKey $API_KEY" \ + "https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch/${PROJECT_ID}/status" +---- + +Example response: + +[source,json] +---- +{ + "phase":"initializing" +} +---- + +[discrete] +[[manage-project-with-api-reset-credentials]] +=== Reset Credentials + +If you lose the credentials provided at the time of the project creation, you can reset the credentials by using the following endpoint: + +[source,bash] +---- +curl -H "Authorization: ApiKey $API_KEY" \ + -XPOST \ + "https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch/${PROJECT_ID}/_reset-credentials" +---- + +[discrete] +[[manage-project-with-api-delete-project]] +=== Delete Project + +You can delete your project via the API: + +[source,bash] +---- +curl -XDELETE -H "Authorization: ApiKey $API_KEY" \ + "https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch/${PROJECT_ID}" +---- + +[discrete] +[[manage-project-with-api-update-project]] +=== Update Project + +You can update your project using a PATCH request. Only the fields included in the body of the request will be updated. 
+ +[source,bash] +---- +curl -H "Authorization: ApiKey $API_KEY" \ + -H "Content-Type: application/json" \ + "https://api.elastic-cloud.com/api/v1/serverless/projects/elasticsearch/${PROJECT_ID}" \ + -XPATCH --data '{ + "name": "new name", + "alias": "new-project-alias" + }' +---- + +[discrete] +[[manage-project-with-api-list-available-regions]] +=== List available regions + +You can obtain the list of regions where projects can be created using the API: + +[source,bash] +---- +curl -H "Authorization: ApiKey $API_KEY" \ + "https://api.elastic-cloud.com/api/v1/serverless/regions" +---- diff --git a/serverless/pages/manage-your-project-rest-api.mdx b/serverless/pages/general/manage-your-project-rest-api.mdx similarity index 100% rename from serverless/pages/manage-your-project-rest-api.mdx rename to serverless/pages/general/manage-your-project-rest-api.mdx diff --git a/serverless/pages/general/manage-your-project.asciidoc b/serverless/pages/general/manage-your-project.asciidoc new file mode 100644 index 0000000000..941d5779a3 --- /dev/null +++ b/serverless/pages/general/manage-your-project.asciidoc @@ -0,0 +1,130 @@ +[[manage-project]] += Manage your projects + +:description: Configure project-wide features and usage. +:keywords: serverless, elasticsearch, project, manage + +preview:[] + +To manage a project: + +. Navigate to https://cloud.elastic.co/[cloud.elastic.co]. +. Log in to your Elastic Cloud account. +. Select your project from the **Serverless projects** panel and click **Manage**. + +From the project page, you can: + +* **Rename your project**. In the **Overview** section, click **Edit** next to the project's name. +* **Manage data and integrations**. Update your project data, including storage settings, indices, and data views, directly in your project. +* **Manage API keys**. Access your project and interact with its data programmatically using Elasticsearch APIs. +* **Manage members**. 
Add members and manage their access to this project or other resources of your organization. + +[discrete] +[[manage-project-search-ai-lake-settings]] +== Search AI Lake settings + +Once ingested, your data is stored in cost-efficient, general storage. A cache layer is available on top of the general storage for recent and frequently queried data that provides faster search speed. Data in this cache layer is considered **search-ready**. + +Together, these data storage layers form your project's **Search AI Lake**. + +The total volume of search-ready data is the sum of the following: + +. The volume of non-time series project data +. The volume of time series project data included in the Search Boost Window + +Each project type offers different settings that let you adjust the performance and volume of search-ready data, as well as the features available in your projects. + +|=== +| Setting | Description | Available in + +| **Search Power** +a| Search Power affects search speed by controlling the number of VCUs (Virtual Compute Units) allocated to search-ready data in the project. Additional VCUs provide more compute resources and result in performance gains. + +The **Cost-efficient** Search Power setting limits the available cache size, and generates cost savings by reducing search performance. + +The **Balanced** Search Power setting ensures that there is sufficient cache for all search-ready data, in order to respond quickly to queries. + +The **Performance** Search Power setting provides more computing resources in addition to the searchable data cache, in order to respond quickly to higher query volumes and more complex queries. +| + +| **Search Boost Window** +a| Non-time series data is always considered search-ready. The **Search Boost Window** determines the volume of time series project data that will be considered search-ready. + +Increasing the window results in a bigger portion of time series project data included in the total search-ready data volume. 
+| + +| **Data Retention** +a| Data retention policies determine how long your project data is retained. + +You can specify different retention periods for specific data streams in your project. +| + +| +a| **Maximum data retention period** + +When enabled, this setting determines the maximum length of time that data can be retained in any data streams of this project. + +Editing this setting replaces the data retention set for all data streams of the project that have a longer data retention defined. Data older than the new maximum retention period that you set is permanently deleted. +| + +| +a| **Default data retention period** + +When enabled, this setting determines the default retention period that is automatically applied to all data streams in your project that do not have a custom retention period already set. +| + +| **Project features** +| Controls <> for your {elastic-sec} project. +| +|=== + +[discrete] +[[project-features-add-ons]] +== Project features and add-ons + + For {elastic-sec} projects, edit the **Project features** to select a feature tier and enable add-on options for specific use cases. + +|=== +| Feature tier | Description and add-ons + +| **Security Analytics Essentials** +a| Standard security analytics, detections, investigations, and collaborations. Allows these add-ons: + +* **Endpoint Protection Essentials**: Endpoint protections with {elastic-defend}. +* **Cloud Protection Essentials**: Cloud native security features. + +| **Security Analytics Complete** +a| Everything in **Security Analytics Essentials** plus advanced features such as entity analytics, threat intelligence, and more. Allows these add-ons: + +* **Endpoint Protection Complete**: Everything in **Endpoint Protection Essentials** plus advanced endpoint detection and response features. +* **Cloud Protection Complete**: Everything in **Cloud Protection Essentials** plus advanced cloud security features. 
+|=== + +[discrete] +[[manage-project-downgrading-the-feature-tier]] +=== Downgrading the feature tier + +When you downgrade your Security project features selection from **Security Analytics Complete** to **Security Analytics Essentials**, the following features become unavailable: + +* All Entity Analytics features +* The ability to use certain entity analytics-related integration packages, such as: ++ +** Data Exfiltration detection +** Lateral Movement detection +** Living off the Land Attack detection +* Intelligence Indicators page +* External rule action connectors +* Case connectors +* Endpoint response actions history +* Endpoint host isolation exceptions +* AI Assistant +* Attack discovery + +And, the following data may be permanently deleted: + +* AI Assistant conversation history +* AI Assistant settings +* Entity Analytics user and host risk scores +* Entity Analytics asset criticality information +* Detection rule external connector settings +* Detection rule response action settings diff --git a/serverless/pages/manage-your-project.mdx b/serverless/pages/general/manage-your-project.mdx similarity index 97% rename from serverless/pages/manage-your-project.mdx rename to serverless/pages/general/manage-your-project.mdx index e766c32227..fb004da66e 100644 --- a/serverless/pages/manage-your-project.mdx +++ b/serverless/pages/general/manage-your-project.mdx @@ -26,7 +26,7 @@ From the project page, you can: ## Search AI Lake settings -Once ingested, your data is stored in cost-efficient, general storage. A cache layer is available on top of the general storage for recent and frequently queried data that provides faster search speed. Data in this cache layer is considered **search-ready**. +Once ingested, your data is stored in cost-efficient, general storage. A cache layer is available on top of the general storage for recent and frequently queried data that provides faster search speed. Data in this cache layer is considered **search-ready**. 
Together, these data storage layers form your project's **Search AI Lake**. @@ -55,13 +55,13 @@ Each project type offers different settings that let you adjust the performance **Search Power** - Search Power affects search speed by controlling the number of VCUs (Virtual Compute Units) allocated to search-ready data in the project. Additional VCUs provide more compute resources and result in performance gains. + Search Power affects search speed by controlling the number of VCUs (Virtual Compute Units) allocated to search-ready data in the project. Additional VCUs provide more compute resources and result in performance gains. The **Cost-efficient** Search Power setting limits the available cache size, and generates cost savings by reducing search performance. The **Balanced** Search Power setting ensures that there is sufficient cache for all search-ready data, in order to respond quickly to queries. - The **Performance** Search Power setting provides more computing resources in addition to the searchable data cache, in order to respond quickly to higher query volumes and more complex queries. + The **Performance** Search Power setting provides more computing resources in addition to the searchable data cache, in order to respond quickly to higher query volumes and more complex queries. @@ -72,7 +72,7 @@ Each project type offers different settings that let you adjust the performance **Search Boost Window** - Non-time series data is always considered search-ready. The **Search Boost Window** determines the volume of time series project data that will be considered search-ready. + Non-time series data is always considered search-ready. The **Search Boost Window** determines the volume of time series project data that will be considered search-ready. Increasing the window results in a bigger portion of time series project data included in the total search-ready data volume. 
@@ -129,7 +129,7 @@ Each project type offers different settings that let you adjust the performance - +
@@ -166,12 +166,12 @@ Each project type offers different settings that let you adjust the performance -### Downgrading the feature tier +### Downgrading the feature tier When you downgrade your Security project features selection from **Security Analytics Complete** to **Security Analytics Essentials**, the following features become unavailable: * All Entity Analytics features -* The ability to use certain entity analytics-related integration packages, such as: +* The ability to use certain entity analytics-related integration packages, such as: * Data Exfiltration detection * Lateral Movement detection * Living off the Land Attack detection diff --git a/serverless/pages/general/service-status.asciidoc b/serverless/pages/general/service-status.asciidoc new file mode 100644 index 0000000000..1c35af754e --- /dev/null +++ b/serverless/pages/general/service-status.asciidoc @@ -0,0 +1,27 @@ +[[serverless-status]] += Monitor serverless status + +:keywords: serverless + +Serverless projects run on cloud platforms, which may undergo changes in availability. +When availability changes, Elastic makes sure to provide you with a current service status. + +To check current and past service availability, go to the Elastic serverless https://serverless-preview-status.statuspage.io/[service status] page. + +[discrete] +[[serverless-status-subscribe-to-updates]] +== Subscribe to updates + +You can be notified about changes to the service status automatically. + +To receive service status updates: + +. Go to the Elastic serverless https://serverless-preview-status.statuspage.io/[service status] page. +. Select **SUBSCRIBE TO UPDATES**. +. You can be notified in the following ways: ++ +** Email +** Slack +** Atom or RSS feeds + +After you subscribe, you'll be notified whenever a service status update is posted. 
diff --git a/serverless/pages/service-status.mdx b/serverless/pages/general/service-status.mdx similarity index 100% rename from serverless/pages/service-status.mdx rename to serverless/pages/general/service-status.mdx diff --git a/serverless/pages/general/sign-up.asciidoc b/serverless/pages/general/sign-up.asciidoc new file mode 100644 index 0000000000..5faf81031d --- /dev/null +++ b/serverless/pages/general/sign-up.asciidoc @@ -0,0 +1,97 @@ +[[sign-up-trial]] += Get started with serverless + +:description: Information about signing up for a serverless Elastic Cloud trial +:keywords: serverless, general, signup + +There are two options to create serverless projects: + +* If you are an existing customer, https://cloud.elastic.co/login[log in to Elastic Cloud]. On the home page, you will see a new option to create serverless projects. Note that if you are already subscribed to Elastic Cloud, there is no specific trial for serverless projects. +* If you are a new user, you can https://cloud.elastic.co/serverless-registration[sign up for a free 14-day trial], and you will be able to launch a serverless project. + +[discrete] +[[sign-up-trial-what-is-included-in-my-trial]] +== What is included in my trial? + +Your free 14-day trial includes: + +**One hosted deployment** + +A deployment lets you explore Elastic solutions for Search, Observability, and Security. Trial deployments run on the latest version of the Elastic Stack. They include 8 GB of RAM spread out over two availability zones, and enough storage space to get you started. If you’re looking to evaluate a smaller workload, you can scale down your trial deployment. +Each deployment includes Elastic features such as Maps, SIEM, machine learning, advanced security, and much more. You have some sample data sets to play with and tutorials that describe how to add your own data. 
+ +**One serverless project** + +Serverless projects package Elastic Stack features by type of solution: + +* https://www.elastic.co/docs/current/serverless/elasticsearch/what-is-elasticsearch-serverless[Elasticsearch] +* https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[Observability] +* https://www.elastic.co/docs/current/serverless/security/what-is-security-serverless[Security] + +When you create a project, you select the project type applicable to your use case, so only the relevant and impactful applications and features are easily accessible to you. + +[NOTE] +==== +During the trial period, you are limited to one active hosted deployment and one active serverless project at a time. When you subscribe, you can create additional deployments and projects. +==== + +[discrete] +[[sign-up-trial-what-limits-are-in-place-during-a-trial]] +== What limits are in place during a trial? + +During the free 14-day trial, Elastic provides access to one hosted deployment and one serverless project. If all you want to do is try out Elastic, the trial includes more than enough to get you started. During the trial period, some limitations apply. + +**Hosted deployments** + +* You can have one active deployment at a time +* The deployment size is limited to 8GB RAM and approximately 360GB of storage, depending on the specified hardware profile +* Machine learning nodes are available up to 4GB RAM +* Custom Elasticsearch plugins are not enabled + +**Serverless projects** + +* You can have one active serverless project at a time. +* Search Power is limited to 100. This setting only exists in Elasticsearch projects. +* Search Boost Window is limited to 7 days. This setting only exists in Elasticsearch projects. + +**How to remove restrictions?** + +To remove limitations, subscribe to https://www.elastic.co/guide/en/cloud/current/ec-billing-details.html[Elastic Cloud]. 
Elastic Cloud subscriptions include the following benefits: + +* Increased memory or storage for deployment components, such as Elasticsearch clusters, machine learning nodes, and APM server. +* As many deployments and projects as you need. +* Third availability zone for your deployments. +* Access to additional features, such as cross-cluster search and cross-cluster replication. + +You can subscribe to Elastic Cloud at any time during your trial. <<serverless-billing,Billing>> starts when you subscribe. To maximize the benefits of your trial, subscribe at the end of the free period. To monitor charges, anticipate future costs, and adjust your usage, check your https://www.elastic.co/guide/en/cloud/current/ec-account-usage.html[account usage] and https://www.elastic.co/guide/en/cloud/current/ec-billing-history.html[billing history]. + +[discrete] +[[sign-up-trial-how-do-i-get-started-with-my-trial]] +== How do I get started with my trial? + +Start by checking out some common approaches for https://www.elastic.co/guide/en/cloud/current/ec-cloud-ingest-data.html#ec-ingest-methods[moving data into Elastic Cloud]. + +[discrete] +[[sign-up-trial-what-happens-at-the-end-of-the-trial]] +== What happens at the end of the trial? + +When your trial expires, the deployment and project that you created during the trial period are suspended until you subscribe to https://www.elastic.co/guide/en/cloud/current/ec-billing-details.html[Elastic Cloud]. When you subscribe, you are able to resume your deployment and serverless project, and regain access to the ingested data. After your trial expires, you have 30 days to subscribe. After 30 days, your deployment, serverless project, and ingested data are permanently deleted. + +If you’re interested in learning more ways to subscribe to Elastic Cloud, don’t hesitate to https://www.elastic.co/contact[contact us]. 
+ +[discrete] +[[sign-up-trial-how-do-i-sign-up-through-a-marketplace]] +== How do I sign up through a marketplace? + +If you’re interested in consolidated billing, subscribe from the AWS Marketplace, which allows you to skip the trial period and connect your AWS Marketplace email to your unique Elastic account. + +[NOTE] +==== +Serverless projects are only available for AWS Marketplace. Support for GCP Marketplace and Azure Marketplace will be added in the near future. +==== + +[discrete] +[[sign-up-trial-how-do-i-get-help]] +== How do I get help? + +We’re here to help. If you have any questions, reach out to https://cloud.elastic.co/support[Support]. diff --git a/serverless/pages/sign-up.mdx b/serverless/pages/general/sign-up.mdx similarity index 100% rename from serverless/pages/sign-up.mdx rename to serverless/pages/general/sign-up.mdx diff --git a/serverless/pages/general/user-profile.asciidoc b/serverless/pages/general/user-profile.asciidoc new file mode 100644 index 0000000000..ca593c45a3 --- /dev/null +++ b/serverless/pages/general/user-profile.asciidoc @@ -0,0 +1,56 @@ +[[user-profile]] += Update your user profile + +:description: Manage your profile settings. +:keywords: serverless, general, profile, update + +preview:[] + +To edit your user profile, go to the user icon on the header bar and select **Profile**. + +[discrete] +[[user-profile-update-your-email-address]] +== Update your email address + +Your email address is used to sign in. If needed, you can change this email address. + +. In the **Profile** section, by **Email address**, select **Edit**. +. Enter a new email address and your current password. ++ +An email is sent to the new address with a link to confirm the change. If you don't get the email after a few minutes, check your spam folder. + +[discrete] +[[user-profile-change-your-password]] +== Change your password + +When you signed up with your email address, you selected a password that you use to log in to the Elastic Cloud console. 
If needed, you can change this password. + +If you know your current password: + +. Navigate to the **Password** section and select **Change password**. +. Enter the current password and provide the new password that you want to use. + +If you don't know your current password: + +. At the login screen for the Elastic Cloud console, select the link **Forgot password?** +. Enter the email address for your account and select **Reset password**. ++ +An email is sent to the address you specified with a link to reset the password. If you don't get the email after a few minutes, check your spam folder. + +[discrete] +[[user-profile-enable-multi-factor-authentication]] +== Enable multi-factor authentication + +To add an extra layer of security, you can either set up Google authenticator or text messaging on a mobile device. + +[TIP] +==== +Before you start using multi-factor authentication, verify that your device has SMS capabilities or download the Google Authenticator application onto your device. +==== + +To enable multi-factor authentication, you must enroll your device. + +. Navigate to the **Multi-factor authentication** section. +. Select **Configure** to enable the Authenticator app or **Add a phone number** to enable the Text message. + +If the device you want to remove is your only enrolled device, you must disable multi-factor authentication first. If your device is lost or stolen, contact https://support.elastic.co/[support]. diff --git a/serverless/pages/user-profile.mdx b/serverless/pages/general/user-profile.mdx similarity index 100% rename from serverless/pages/user-profile.mdx rename to serverless/pages/general/user-profile.mdx diff --git a/serverless/pages/general/what-is-serverless.asciidoc b/serverless/pages/general/what-is-serverless.asciidoc new file mode 100644 index 0000000000..a238bf3031 --- /dev/null +++ b/serverless/pages/general/what-is-serverless.asciidoc @@ -0,0 +1,137 @@ +[[what-is-serverless-elastic]] += What is serverless Elastic? 
+ +:keywords: serverless + +Serverless projects use the core components of the {stack}, such as {es} and {kib}, and are based on https://www.elastic.co/blog/elastic-serverless-architecture[an architecture that +decouples compute and storage]. Search and indexing operations are separated, which offers high flexibility for scaling your workloads while ensuring +a high level of performance. + +**Management free.** Elastic manages the underlying Elastic cluster, so you can focus on your data. With serverless projects, Elastic is responsible for automatic upgrades, data backups, +and business continuity. + +**Autoscaled.** To meet your performance requirements, the system automatically adjusts to your workloads. For example, when you have a short time spike on the +data you ingest, more resources are allocated for that period of time. When the spike is over, the system uses less resources, without any action +on your end. + +**Optimized data storage.** Your data is stored in cost-efficient, general storage. A cache layer is available on top of the general storage for recent and frequently queried data that provides faster search speed. +The size of the cache layer and the volume of data it holds depend on https://www.elastic.co/docs/current/serverless/elasticsearch/manage-project[settings] that you can configure for each project. + +**Dedicated experiences.** All serverless solutions are built on the Elastic Search Platform and include the core capabilities of the Elastic Stack. They also each offer a distinct experience and specific capabilities that help you focus on your data, goals, and use cases. + +**Pay per usage.** Each serverless project type includes product-specific and usage-based pricing. 
+
+.Serverless billing starts June 1, 2024
+[IMPORTANT]
+====
+Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the https://cloud.elastic.co/billing/usage?[Billing Usage page]. Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method.
+====
+
+[discrete]
+[[what-is-serverless-elastic-control-your-data-and-performance]]
+== Control your data and performance
+
+Control your project data and query performance against your project data.
+
+**Data.** Choose the data you want to ingest, and the method to ingest it. By default, data is stored indefinitely in your project,
+and you define the retention settings for your data streams.
+
+**Performance.** For granular control over costs and query performance against your project data, serverless projects come with a set of predefined https://www.elastic.co/docs/current/serverless/elasticsearch/manage-project[settings] that you can edit.
+
+[NOTE]
+====
+Some or all of these settings may not be available for all types of serverless projects.
+====
+
+[discrete]
+[[what-is-serverless-elastic-differences-between-serverless-projects-and-hosted-deployments-on-ecloud]]
+== Differences between serverless projects and hosted deployments on {ecloud}
+
+You can run https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[hosted deployments] of the {stack} on {ecloud}. These hosted deployments provide more provisioning and advanced configuration options.
+
+|===
+| Option| Serverless| Hosted
+
+| **Cluster management**
+| Fully managed by Elastic.
+| You provision and manage your hosted clusters. Shared responsibility with Elastic.
+
+| **Scaling**
+| Autoscales out of the box.
+| Manual scaling or autoscaling available for you to enable.
+
+| **Upgrades**
+| Automatically performed by Elastic.
+| You choose when to upgrade. + +| **Pricing** +| Individual per project type and based on your usage. +| Based on deployment size and subscription level. + +| **Performance** +| Autoscales based on your usage. +| Manual scaling. + +| **Solutions** +| Single solution per project. +| Full Elastic Stack per deployment. + +| **User management** +| Elastic Cloud-managed users. +| Elastic Cloud-managed users and native Kibana users. + +| **API support** +| Subset of https://www.elastic.co/docs/api[APIs]. +| All Elastic APIs. + +| **Backups** +| Projects automatically backed up by Elastic. +| Your responsibility with Snapshot & Restore. + +| **Data retention** +| Editable on data streams. +| Index Lifecycle Management. +|=== + +[discrete] +[[what-is-serverless-elastic-answers-to-common-serverless-questions]] +== Answers to common serverless questions + +**What Support is available for the serverless preview?** + +There is no official SLA for Support in Serverless until General Availability (GA). We’ll do our best to service customers and inquiries as we would any pre-GA product - at a Platinum/Enterprise Severity 3 (1 business day) SLA target. + +**Is there migration support between hosted deployments and serverless projects?** + +Migration paths between hosted deployments and serverless projects are currently unsupported. + +**How can I move data to or from serverless projects?** + +We are working on data migration tools! In the interim, you can https://www.elastic.co/docs/current/serverless/elasticsearch/ingest-data-through-logstash[use Logstash] with Elasticsearch input and output plugins to move data to and from serverless projects. + +**How does serverless ensure compatibility between software versions?** + +Connections and configurations are unaffected by upgrades. To ensure compatibility between software versions, quality testing and API versioning are used. 
+ +**Can I convert a serverless project into a hosted deployment, or a hosted deployment into a serverless project?** + +Projects and deployments are based on different architectures, and you are unable to convert. + +**Can I convert a serverless project into a project of a different type?** + +You are unable to convert projects into different project types, but you can create as many projects as you’d like. You will be charged only for your usage. + +**How can I create serverless service accounts?** + +Create API keys for service accounts in your serverless projects. Options to automate the creation of API keys with tools such as Terraform will be available in the future. + +To raise a Support case with Elastic, raise a case for your subscription the same way you do today. In the body of the case, make sure to mention you are working in serverless to ensure we can provide the appropriate support. + +**Where can I learn about pricing for serverless?** + +See serverless pricing information for https://www.elastic.co/pricing/serverless-search[Search], https://www.elastic.co/pricing/serverless-observability[Observability], and https://www.elastic.co/pricing/serverless-security[Security]. + +**Can I request backups or restores for my projects?** + +It is not currently possible to request backups or restores for projects, but we are working on data migration tools to better support this. 
diff --git a/serverless/pages/what-is-serverless.mdx b/serverless/pages/general/what-is-serverless.mdx similarity index 100% rename from serverless/pages/what-is-serverless.mdx rename to serverless/pages/general/what-is-serverless.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.asciidoc new file mode 100644 index 0000000000..64066dd208 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.asciidoc @@ -0,0 +1,134 @@ += Classify text + +:description: NLP tasks that classify input text or determine the language of text. +:keywords: serverless, elasticsearch, tbd + +preview:[] + +These NLP tasks enable you to identify the language of text and classify or +label unstructured input text: + +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/ootb-models/lang-ident[] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/classify-text[Text classification] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/classify-text[Zero-shot text classification] + +[discrete] +[[text-classification]] +== Text classification + +Text classification assigns the input text to one of multiple classes that best +describe the text. The classes used depend on the model and the data set that +was used to train it. Based on the number of classes, two main types of +classification exist: binary classification, where the number of classes is +exactly two, and multi-class classification, where the number of classes is more +than two. + +This task can help you analyze text for markers of positive or negative +sentiment or classify text into various topics. 
For example, you might use a +trained model to perform sentiment analysis and determine whether the following +text is "POSITIVE" or "NEGATIVE": + +[source,js] +---- +{ + docs: [{"text_field": "This was the best movie I’ve seen in the last decade!"}] +} +... +---- + +// NOTCONSOLE + +Likewise, you might use a trained model to perform multi-class classification +and determine whether the following text is a news topic related to "SPORTS", +"BUSINESS", "LOCAL", or "ENTERTAINMENT": + +[source,js] +---- +{ + docs: [{"text_field": "The Blue Jays played their final game in Toronto last night and came out with a win over the Yankees, highlighting just how far the team has come this season."}] +} +... +---- + +// NOTCONSOLE + +[discrete] +[[zero-shot-text-classification]] +== Zero-shot text classification + +The zero-shot classification task offers the ability to classify text without +training a model on a specific set of classes. Instead, you provide the classes +when you deploy the model or at {infer} time. It uses a model trained on a +large data set that has gained a general language understanding and asks the +model how well the labels you provided fit with your text. + +This task enables you to analyze and classify your input text even when you +don't have sufficient training data to train a text classification model. + +For example, you might want to perform multi-class classification and determine +whether a news topic is related to "SPORTS", "BUSINESS", "LOCAL", or +"ENTERTAINMENT". However, in this case the model is not trained specifically for +news classification; instead, the possible labels are provided together with the +input text at {infer} time: + +[source,js] +---- +{ + docs: [{"text_field": "The S&P 500 gained a meager 12 points in the day’s trading. 
Trade volumes remain consistent with those of the past week while investors await word from the Fed about possible rate increases."}],
+  "inference_config": {
+    "zero_shot_classification": {
+      "labels": ["SPORTS", "BUSINESS", "LOCAL", "ENTERTAINMENT"]
+    }
+  }
+}
+----
+
+// NOTCONSOLE
+
+The task returns the following result:
+
+[source,js]
+----
+...
+{
+  "predicted_value": "BUSINESS"
+  ...
+}
+...
+----
+
+// NOTCONSOLE
+
+You can use the same model to perform {infer} with different classes, such as:
+
+[source,js]
+----
+{
+  docs: [{"text_field": "Hello support team. I’m writing to inquire about the possibility of sending my broadband router in for repairs. The internet is really slow and the router keeps rebooting! It’s a big problem because I’m in the middle of binge-watching The Mandalorian!"}],
+  "inference_config": {
+    "zero_shot_classification": {
+      "labels": ["urgent", "internet", "phone", "cable", "mobile", "tv"]
+    }
+  }
+}
+----
+
+// NOTCONSOLE
+
+The task returns the following result:
+
+[source,js]
+----
+...
+{
+  "predicted_value": ["urgent", "internet", "tv"]
+  ...
+}
+...
+----
+
+// NOTCONSOLE
+
+Since you can adjust the labels while you perform {infer}, this type of task is
+exceptionally flexible. If you are consistently using the same labels, however,
+it might be better to use a fine-tuned text classification model.
diff --git a/serverless/pages/explore-your-data-ml-nlp-classify-text.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-classify-text.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.asciidoc new file mode 100644 index 0000000000..dc71f9c26c --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.asciidoc @@ -0,0 +1,81 @@ += Deploy the model in your cluster + +:description: Description to be written + +preview:[] + +After you import the model and vocabulary, you can use {kib} to view and +manage their deployment across your cluster under **{ml-app}** → +**Model Management**. Alternatively, you can use the +{ref}/start-trained-model-deployment.html[start trained model deployment API]. + +You can deploy a model multiple times by assigning a unique deployment ID when +starting the deployment. It enables you to have dedicated deployments for +different purposes, such as search and ingest. By doing so, you ensure that the +search speed remains unaffected by ingest workloads, and vice versa. Having +separate deployments for search and ingest mitigates performance issues +resulting from interactions between the two, which can be hard to diagnose. + +[role="screenshot"] +image::images/ml-nlp-deployment-id.png["Model deployment on the Trained Models UI."] + +It is recommended to fine-tune each deployment based on its specific purpose. To +improve ingest performance, increase throughput by adding more allocations to +the deployment. For improved search speed, increase the number of threads per +allocation. + +[NOTE] +==== +Since eland uses APIs to deploy the models, you cannot see the models in +{kib} until the saved objects are synchronized. 
You can follow the prompts in
+{kib}, wait for automatic synchronization, or use the
+{kibana-ref}/machine-learning-api-sync.html[sync {ml} saved objects API].
+====
+
+When you deploy the model, its allocations are distributed across available {ml}
+nodes. Model allocations are independent units of work for NLP tasks. To
+influence model performance, you can configure the number of allocations and the
+number of threads used by each allocation of your deployment.
+
+Throughput can be scaled by adding more allocations to the deployment; it
+increases the number of {infer} requests that can be performed in parallel. All
+allocations assigned to a node share the same copy of the model in memory. The
+model is loaded into memory in a native process that encapsulates `libtorch`,
+which is the underlying {ml} library of PyTorch. The number of allocations
+setting affects the number of model allocations across all the {ml} nodes. Model
+allocations are distributed in such a way that the total number of used threads
+does not exceed the allocated processors of a node.
+
+The threads per allocation setting affects the number of threads used by each
+model allocation during {infer}. Increasing the number of threads generally
+increases the speed of {infer} requests. The value of this setting must not
+exceed the number of available allocated processors per node.
+
+You can view the allocation status in {kib} or by using the
+{ref}/get-trained-models-stats.html[get trained model stats API]. If you want to
+change the number of allocations, you can use the
+{ref}/update-trained-model-deployment.html[update trained model deployment API]
+after the allocation status is `started`.
+
+[discrete]
+[[request-queues-and-search-priority]]
+== Request queues and search priority
+
+Each allocation of a model deployment has a dedicated queue to buffer {infer}
+requests.
The size of this queue is determined by the `queue_capacity` parameter +in the +{ref}/start-trained-model-deployment.html[start trained model deployment API]. +When the queue reaches its maximum capacity, new requests are declined until +some of the queued requests are processed, creating available capacity once +again. When multiple ingest pipelines reference the same deployment, the queue +can fill up, resulting in rejected requests. Consider using dedicated +deployments to prevent this situation. + +{infer-cap} requests originating from search, such as the +{ref}/query-dsl-sparse-vector-query.html[`sparse_vector` query], have a higher +priority compared to non-search requests. The {infer} ingest processor generates +normal priority requests. If both a search query and an ingest processor use the +same deployment, the search requests with higher priority skip ahead in the +queue for processing before the lower priority ingest requests. This +prioritization accelerates search responses while potentially slowing down +ingest where response time is less critical. diff --git a/serverless/pages/explore-your-data-ml-nlp-deploy-model.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-deploy-model.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc new file mode 100644 index 0000000000..2ab65e9039 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc @@ -0,0 +1,16 @@ += Deploy trained models + +:description: You can import trained models into your cluster and configure them for specific NLP tasks. 
+:keywords: serverless, elasticsearch, tbd + +preview:[] + +If you want to perform {nlp} tasks in your cluster, you must deploy an +appropriate trained model. There is tooling support in +https://github.com/elastic/eland[Eland] and {kib} to help you prepare and +manage models. + +. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/select-model[Select a trained model]. +. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/import-model[Import the trained model and vocabulary]. +. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/deploy-model[Deploy the model in your cluster]. +. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/try-it-out[Try it out]. diff --git a/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-elser.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-elser.asciidoc new file mode 100644 index 0000000000..24a55adbb6 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-elser.asciidoc @@ -0,0 +1,169 @@ += ELSER – Elastic Learned Sparse EncodeR + +:description: ELSER is a learned sparse ranking model trained by Elastic. +:keywords: serverless, elasticsearch, tbd + +preview:[] + +Elastic Learned Sparse EncodeR - or ELSER - is a retrieval model trained by +Elastic that enables you to perform +{ref}/semantic-search-elser.html[semantic search] to retrieve more relevant +search results. 
This search type provides you search results based on contextual +meaning and user intent, rather than exact keyword matches. + +ELSER is an out-of-domain model which means it does not require fine-tuning on +your own data, making it adaptable for various use cases out of the box. + +ELSER expands the indexed and searched passages into collections of terms that +are learned to co-occur frequently within a diverse set of training data. The +terms that the text is expanded into by the model _are not_ synonyms for the +search terms; they are learned associations. These expanded terms are weighted +as some of them are more significant than others. Then the {es} +{ref}/rank-feature.html[rank-feature field type] is used to store the terms +and weights at index time, and to search against later. + +[discrete] +[[requirements]] +== Requirements + +To use ELSER, you must have the {subscriptions}[appropriate subscription] level +for semantic search or the trial period activated. + +[discrete] +[[benchmarks]] +== Benchmarks + +The following sections provide information about how ELSER performs on different +hardwares and compares the model performance to {es} BM25 and other strong +baselines such as Splade or OpenAI. + +[discrete] +[[hardware-benchmarks]] +=== Hardware benchmarks + +Two data sets were utilized to evaluate the performance of ELSER in different +hardware configurations: `msmarco-long-light` and `arguana`. + +|=== +| | | | + +| **Data set** +| **Data set size** +| **Average count of tokens / query** +| **Average count of tokens / document** + +| `msmarco-long-light` +| 37367 documents +| 9 +| 1640 + +| `arguana` +| 8674 documents +| 238 +| 202 +|=== + +The `msmarco-long-light` data set contains long documents with an average of +over 512 tokens, which provides insights into the performance implications +of indexing and {infer} time for long documents. 
This is a subset of the
+"msmarco" dataset specifically designed for document retrieval (it shouldn't be
+confused with the "msmarco" dataset used for passage retrieval, which primarily
+consists of shorter spans of text).
+
+The `arguana` data set is a https://github.com/beir-cellar/beir[BEIR] data set.
+It consists of long queries with an average of 200 tokens per query. It can
+represent an upper limit for query slowness.
+
+The table below presents benchmarking results for ELSER using various hardware
+configurations.
+
+[discrete]
+[[qualitative-benchmarks]]
+=== Qualitative benchmarks
+
+The metric that is used to evaluate ELSER's ranking ability is the Normalized
+Discounted Cumulative Gain (NDCG) which can handle multiple relevant documents
+and fine-grained document ratings. The metric is applied to a fixed-sized list
+of retrieved documents which, in this case, is the top 10 documents (NDCG@10).
+
+The table below shows the performance of ELSER compared to {es} BM25 with an
+English analyzer broken down by the 12 data sets used for the evaluation. ELSER
+has 10 wins, 1 draw, 1 loss and an average improvement in NDCG@10 of 17%.
+
+_NDCG@10 for BEIR data sets for BM25 and ELSER (higher values are better)_
+
+The following table compares the average performance of ELSER to some other
+strong baselines. The OpenAI results are separated out because they use a
+different subset of the BEIR suite.
+
+_Average NDCG@10 for BEIR data sets vs. various high quality baselines (higher_
+_is better). OpenAI chose a different subset, ELSER results on this set_
+_reported separately._
+
+To read more about the evaluation details, refer to
+https://www.elastic.co/blog/may-2023-launch-information-retrieval-elasticsearch-ai-model[this blog post].
+
+[discrete]
+[[download-and-deploy-elser]]
+== Download and deploy ELSER
+
+You can download and deploy ELSER either from **Trained Models** or by using the
+Dev Console.
+
+[discrete]
+[[using-the-trained-models-page]]
+=== Using the Trained Models page
+
+. In {kib}, navigate to **Trained Models**. ELSER can be found
+in the list of trained models.
+. Click the **Download model** button under **Actions**. You can check the
+download status on the **Notifications** page.
++
+
+. After the download is finished, start the deployment by clicking the
+**Start deployment** button.
+. Provide a deployment ID, select the priority, and set the number of
+allocations and threads per allocation values.
++
+
+. Click **Start**.
+
+[discrete]
+[[using-the-dev-console]]
+=== Using the Dev Console
+
+. Navigate to the **Dev Console**.
+. Create the ELSER model configuration by running the following API call:
++
+[source,console]
+----
+PUT _ml/trained_models/.elser_model_1
+{
+  "input": {
+    "field_names": ["text_field"]
+  }
+}
+----
++
+The API call automatically initiates the model download if the model is not
+downloaded yet.
+. Deploy the model by using the
+{ref}/start-trained-model-deployment.html[start trained model deployment API]
+with a deployment ID:
++
+[source,console]
+----
+POST _ml/trained_models/.elser_model_1/deployment/_start?deployment_id=for_search
+----
++
+You can deploy the model multiple times with different deployment IDs.
+
+After the deployment is complete, ELSER is ready to use either in an ingest
+pipeline or in a `sparse_vector` query to perform semantic search.
+ +[discrete] +[[further-reading]] +== Further reading + +* {ref}/semantic-search-elser.html[Perform semantic search with ELSER] +* https://www.elastic.co/blog/may-2023-launch-information-retrieval-elasticsearch-ai-model[Improving information retrieval in the Elastic Stack: Introducing Elastic Learned Sparse Encoder, our new retrieval model] diff --git a/serverless/pages/explore-your-data-ml-nlp-elser.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-elser.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-elser.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-elser.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc new file mode 100644 index 0000000000..edc2621c57 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc @@ -0,0 +1,11 @@ += Examples + +:description: Description to be written + +preview:[] + +The following pages contain end-to-end examples of how to use the different +{nlp} tasks in the {stack}. 
+ +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/examples/ner[How to deploy named entity recognition] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/examples/text-embedding-vector-search[How to deploy a text embedding model and use it for semantic search] diff --git a/serverless/pages/explore-your-data-ml-nlp-examples.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-examples.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-examples.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-examples.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.asciidoc new file mode 100644 index 0000000000..5b63e0484a --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.asciidoc @@ -0,0 +1,145 @@ += Extract information + +:description: NLP tasks that extract information from unstructured text. +:keywords: serverless, elasticsearch, tbd + +preview:[] + +These NLP tasks enable you to extract information from your unstructured text: + +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/extract-info[Named entity recognition] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/extract-info[Fill-mask] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/extract-info[Question answering] + +[discrete] +[[named-entity-recognition]] +== Named entity recognition + +The named entity recognition (NER) task can identify and categorize certain +entities - typically proper nouns - in your unstructured text. Named entities +usually refer to objects in the real world such as persons, locations, +organizations, and other miscellaneous entities that are consistently referenced +by a proper name. 
+ +NER is a useful tool to identify key information, add structure and gain +insight into your content. It's particularly useful while processing and +exploring large collections of text such as news articles, wiki pages or +websites. It makes it easier to understand the subject of a text and group +similar pieces of content together. + +In the following example, the short text is analyzed for any named entity and +the model extracts not only the individual words that make up the entities, but +also phrases, consisting of multiple words. + +[source,js] +---- +{ + "docs": [{"text_field": "Elastic is headquartered in Mountain View, California."}] +} +... +---- + +// NOTCONSOLE + +The task returns the following result: + +[source,js] +---- +{ + "inference_results": [{ + ... + entities: [ + { + "entity": "Elastic", + "class": "organization" + }, + { + "entity": "Mountain View", + "class": "location" + }, + { + "entity": "California", + "class": "location" + } + ] + } + ] +} +... +---- + +// NOTCONSOLE + +[discrete] +[[fill-mask]] +== Fill-mask + +The objective of the fill-mask task is to predict a missing word from a text +sequence. The model uses the context of the masked word to predict the most +likely word to complete the text. + +The fill-mask task can be used to quickly and easily test your model. + +In the following example, the special word “[MASK]” is used as a placeholder to +tell the model which word to predict. + +[source,js] +---- +{ + docs: [{"text_field": "The capital city of France is [MASK]."}] +} +... +---- + +// NOTCONSOLE + +The task returns the following result: + +[source,js] +---- +... +{ + "predicted_value": "Paris" + ... +} +... +---- + +// NOTCONSOLE + +[discrete] +[[question-answering]] +== Question answering + +The question answering (or extractive question answering) task makes it possible +to get answers to certain questions by extracting information from the provided +text. 
+ +The model tokenizes the string of – usually long – unstructured text, then it +attempts to pull an answer for your question from the text. The logic is +shown by the following examples: + +[source,js] +---- +{ + "docs": [{"text_field": "The Amazon rainforest (Portuguese: Floresta Amazônica or Amazônia; Spanish: Selva Amazónica, Amazonía or usually Amazonia; French: Forêt amazonienne; Dutch: Amazoneregenwoud), also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America. This basin encompasses 7,000,000 square kilometres (2,700,000 sq mi), of which 5,500,000 square kilometres (2,100,000 sq mi) are covered by the rainforest. This region includes territory belonging to nine nations. The majority of the forest is contained within Brazil, with 60% of the rainforest, followed by Peru with 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador, Bolivia, Guyana, Suriname and French Guiana. States or departments in four nations contain "Amazonas" in their names. The Amazon represents over half of the planet's remaining rainforests, and comprises the largest and most biodiverse tract of tropical rainforest in the world, with an estimated 390 billion individual trees divided into 16,000 species."}], + "inference_config": {"question_answering": {"question": "Which name is also used to describe the Amazon rainforest in English?"}} +} +... +---- + +// NOTCONSOLE + +The answer is shown by the object below: + +[source,js] +---- +... +{ + "predicted_value": "Amazonia" + ... +} +... 
+---- + +// NOTCONSOLE diff --git a/serverless/pages/explore-your-data-ml-nlp-extract-info.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-extract-info.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-import-model.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-import-model.asciidoc new file mode 100644 index 0000000000..b08fdcd82b --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-import-model.asciidoc @@ -0,0 +1,133 @@ += Import the trained model and vocabulary + +:keywords: serverless, elasticsearch, tbd + +preview:[] + +[IMPORTANT] +==== +If you want to install a trained model in a restricted or closed +network, refer to +{eland-docs}/machine-learning.html#ml-nlp-pytorch-air-gapped[these instructions]. +==== + +After you choose a model, you must import it and its tokenizer vocabulary to +your cluster. When you import the model, it must be chunked and imported one +chunk at a time for storage in parts due to its size. + +[NOTE] +==== +Trained models must be in a TorchScript representation for use with +{stack-ml-features}. +==== + +https://github.com/elastic/eland[Eland] is an {es} Python client that +provides a simple script to perform the conversion of Hugging Face transformer +models to their TorchScript representations, the chunking process, and upload to +{es}; it is therefore the recommended import method. You can either install +the Python Eland client on your machine or use a Docker image to build Eland and +run the model import script. + +[discrete] +[[import-with-the-eland-client-installed]] +== Import with the Eland client installed + +. Install the {eland-docs}/installation.html[Eland Python client] with +PyTorch extra dependencies. ++ +[source,shell] +---- +python -m pip install 'eland[pytorch]' +---- ++ +// NOTCONSOLE +. 
Run the `eland_import_hub_model` script to download the model from Hugging
+Face, convert it to TorchScript format, and upload to the {es} cluster.
+For example:
++
+// NOTCONSOLE
++
+[source,shell]
+----
+eland_import_hub_model --cloud-id <cloud-id> \ <1>
+-u <username> -p <password> \ <2>
+--hub-model-id <model-id> \ <3>
+--task-type ner <4>
+----
++
+<1> Specify the Elastic Cloud identifier. Alternatively, use `--url`.
++
+<2> Provide authentication details to access your cluster. Refer to
+https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/import-model[Authentication methods] to learn more.
++
+<3> Specify the identifier for the model in the Hugging Face model hub.
++
+<4> Specify the type of NLP task. Supported values are `fill_mask`, `ner`,
+`text_classification`, `text_embedding`, and `zero_shot_classification`.
+
+For more details, refer to
+https://www.elastic.co/guide/en/elasticsearch/client/eland/current/machine-learning.html#ml-nlp-pytorch[https://www.elastic.co/guide/en/elasticsearch/client/eland/current/machine-learning.html#ml-nlp-pytorch].
+
+[discrete]
+[[import-with-docker]]
+== Import with Docker
+
+[IMPORTANT]
+====
+To use the Docker container, you need to clone the Eland repository:
+https://github.com/elastic/eland[https://github.com/elastic/eland]
+====
+
+If you want to use Eland without installing it, clone the Eland repository and
+from the root directory run the following to build the Docker image:
+
+[source,bash]
+----
+$ docker build -t elastic/eland .
+----
+
+You can now use the container interactively:
+
+[source,bash]
+----
+$ docker run -it --rm --network host elastic/eland
+----
+
+The `eland_import_hub_model` script can be run directly in the docker command:
+
+[source,bash]
+----
+docker run -it --rm elastic/eland \
+    eland_import_hub_model \
+    --url $ELASTICSEARCH_URL \
+    --hub-model-id elastic/distilbert-base-uncased-finetuned-conll03-english \
+    --start
+----
+
+Replace the `$ELASTICSEARCH_URL` with the URL for your {es} cluster. 
Refer to
+https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/import-model[Authentication methods]
+to learn more.
+
+[discrete]
+[[authentication-methods]]
+== Authentication methods
+
+The following authentication options are available when using the import script:
+
+* username/password authentication (specified with the `-u` and `-p` options):
++
+[source,bash]
+----
+eland_import_hub_model --url https://<hostname>:<port> -u <username> -p <password> ...
+----
+
+* username/password authentication (embedded in the URL):
++
+[source,bash]
+----
+eland_import_hub_model --url https://<username>:<password>@<hostname>:<port> ...
+----
+* API key authentication:
++
+[source,bash]
+----
+eland_import_hub_model --url https://<hostname>:<port> --es-api-key <api key> ...
+----
diff --git a/serverless/pages/explore-your-data-ml-nlp-import-model.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-import-model.mdx
similarity index 100%
rename from serverless/pages/explore-your-data-ml-nlp-import-model.mdx
rename to serverless/pages/hidden/explore-your-data-ml-nlp-import-model.mdx
diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-inference.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-inference.asciidoc
new file mode 100644
index 0000000000..bc643e6bea
--- /dev/null
+++ b/serverless/pages/hidden/explore-your-data-ml-nlp-inference.asciidoc
@@ -0,0 +1,299 @@
+= Add NLP {infer} to ingest pipelines
+
+:description: You can import trained models into your cluster and configure them for specific NLP tasks.
+:keywords: serverless, elasticsearch, tbd
+
+preview:[]
+
+After you https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models[deploy a trained model in your cluster],
+you can use it to perform {nlp} tasks in ingest pipelines.
+
+. Verify that all of the
+{ref}/ingest.html#ingest-prerequisites[ingest pipeline prerequisites]
+are met.
+. 
https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[Add an {infer} processor to an ingest pipeline]. +. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[Ingest documents]. +. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[View the results]. + +[discrete] +[[add-an-infer-processor-to-an-ingest-pipeline]] +== Add an {infer} processor to an ingest pipeline + +In {kib}, you can create and edit pipelines under **Content** → **Pipelines**. + +. Click **Create pipeline** or edit an existing pipeline. +. Add an {ref}/inference-processor.html[{infer} processor] to your pipeline: ++ +.. Click **Add a processor** and select the **{infer-cap}** processor type. +.. Set **Model ID** to the name of your trained model, for example +`elastic__distilbert-base-cased-finetuned-conll03-english` or +`lang_ident_model_1`. +.. If you use the {lang-ident} model (`lang_ident_model_1`) that is provided in +your cluster: ++ +... The input field name is assumed to be `text`. If you want to identify +languages in a field with a different name, you must map your field name to +`text` in the **Field map** section. For example: ++ +[source,js] +---- +{ +"message": "text" +} +---- ++ +// NOTCONSOLE +.. Click **Add** to save the processor. +. Optional: Add a {ref}/set-processor.html[set processor] to index the ingest +timestamp. ++ +.. Click **Add a processor** and select the **Set** processor type. +.. Choose a name for the field (such as `event.ingested`) and set its value to +`{{{_ingest.timestamp}}}`. For more details, refer to +{ref}/ingest.html#access-ingest-metadata[Access ingest metadata in a processor]. +.. Click **Add** to save the processor. +. Optional: Add failure processors to handle exceptions. For example, in the +**Failure processors** section: ++ +.. Add a set processor to capture the +pipeline error message. 
Choose a name for the field (such as +`ml.inference_failure`) and set its value to the +`{{_ingest.on_failure_message}}` document metadata field. +.. Add a set processor to reroute +problematic documents to a different index for troubleshooting purposes. Use +the `_index` metadata field and set its value to a new name (such as +`failed-{{{ _index }}}`). For more details, refer +to {ref}/ingest.html#handling-pipeline-failures[Handling pipeline failures]. +. To test the pipeline, click **Add documents**. ++ +.. In the **Documents** tab, provide a sample document for testing. ++ +For example, to test a trained model that performs named entity recognition +(NER): ++ +[source,js] +---- +[ +{ +"_source": { +"text_field":"Hello, my name is Josh and I live in Berlin." +} +} +] +---- ++ +// NOTCONSOLE ++ +To test a trained model that performs {lang-ident}: ++ +[source,js] +---- +[ +{ +"_source":{ +"message":"Sziasztok! Ez egy rövid magyar szöveg. Nézzük, vajon sikerül-e azonosítania a language identification funkciónak? Annak ellenére is sikerülni fog, hogy a szöveg két angol szót is tartalmaz." +} +} +] +---- ++ +// NOTCONSOLE +.. Click **Run the pipeline** and verify the pipeline worked as expected. ++ +In the {lang-ident} example, the predicted value is the ISO identifier of the +language with the highest probability. In this case, it should be `hu` for +Hungarian. +.. If everything looks correct, close the panel, and click **Create +pipeline**. The pipeline is now ready for use. 
+ +//// +/* + + + +```console +POST _ingest/pipeline/my-ner-pipeline +{ +"inference": { + "model_id": "elastic__distilbert-base-cased-finetuned-conll03-english", + "field_map": { + "review": "text_field" + }, + "on_failure": [ + { + "set": { + "description": "Set the error message", + "field": "ml.inference_failure", + "value": "{{_ingest.on_failure_message}}" + } + }, + { + "set": { + "description": "Index document to 'failed-'", + "field": "_index", + "value": "failed-{{{ _index }}}" + } + } + ] +} +} +``` +TEST[skip:TBD] + + + +*/ +//// + +[discrete] +[[ingest-documents]] +== Ingest documents + +You can now use your ingest pipeline to perform NLP tasks on your data. + +Before you add data, consider which mappings you want to use. For example, you +can create explicit mappings with the create index API in the +**{dev-tools-app}** → **Console**: + +[source,console] +---- +PUT ner-test +{ + "mappings": { + "properties": { + "ml.inference.predicted_value": {"type": "annotated_text"}, + "ml.inference.model_id": {"type": "keyword"}, + "text_field": {"type": "text"}, + "event.ingested": {"type": "date"} + } + } +} +---- + +// TEST[skip:TBD] + +[TIP] +==== +To use the `annotated_text` data type in this example, you must install the +{plugins}/mapper-annotated-text.html[mapper annotated text plugin]. For more +installation details, refer to\ +{cloud}/ec-adding-elastic-plugins.html[Add plugins provided with {ess}]. +==== + +You can then use the new pipeline to index some documents. 
For example, use a +bulk indexing request with the `pipeline` query parameter for your NER pipeline: + +[source,console] +---- +POST /_bulk?pipeline=my-ner-pipeline +{"create":{"_index":"ner-test","_id":"1"}} +{"text_field":"Hello, my name is Josh and I live in Berlin."} +{"create":{"_index":"ner-test","_id":"2"}} +{"text_field":"I work for Elastic which was founded in Amsterdam."} +{"create":{"_index":"ner-test","_id":"3"}} +{"text_field":"Elastic has headquarters in Mountain View, California."} +{"create":{"_index":"ner-test","_id":"4"}} +{"text_field":"Elastic's founder, Shay Banon, created Elasticsearch to solve a simple need: finding recipes!"} +{"create":{"_index":"ner-test","_id":"5"}} +{"text_field":"Elasticsearch is built using Lucene, an open source search library."} +---- + +// TEST[skip:TBD] + +Or use an individual indexing request with the `pipeline` query parameter for +your {lang-ident} pipeline: + +[source,console] +---- +POST lang-test/_doc?pipeline=my-lang-pipeline +{ + "message": "Mon pays ce n'est pas un pays, c'est l'hiver" +} +---- + +// TEST[skip:TBD] + +You can also use NLP pipelines when you are reindexing documents to a new +destination. For example, since the +{kibana-ref}/get-started.html#gs-get-data-into-kibana[sample web logs data set] +contain a `message` text field, you can reindex it with your {lang-ident} +pipeline: + +[source,console] +---- +POST _reindex +{ + "source": { + "index": "kibana_sample_data_logs", + "size": 50 + }, + "dest": { + "index": "lang-test", + "pipeline": "my-lang-pipeline" + } +} +---- + +// TEST[skip:TBD] + +However, those web log messages are unlikely to contain enough words for the +model to accurately identify the language. + +[TIP] +==== +Set the reindex `size` option to a value smaller than the `queue_capacity` +for the trained model deployment. Otherwise, requests might be rejected with a +"too many requests" 429 error code. 
+==== + +[discrete] +[[view-the-results]] +== View the results + +Before you can verify the results of the pipelines, you must +{kibana-ref}/data-views.html[create {data-sources}]. Then you can explore +your data in **Discover**: + +The `ml.inference.predicted_value` field contains the output from the {infer} +processor. In this NER example, there are two documents that contain the +`Elastic` organization entity. + +In this {lang-ident} example, the `ml.inference.predicted_value` contains the +ISO identifier of the language with the highest probability and the +`ml.inference.top_classes` fields contain the top five most probable languages +and their scores: + +To learn more about ingest pipelines and all of the other processors that you +can add, refer to {ref}/ingest.html[Ingest pipelines]. + +[discrete] +[[common-problems]] +== Common problems + +If you encounter problems while using your trained model in an ingest pipeline, +check the following possible causes: + +. The trained model is not deployed in your cluster. You can view its status in +**{ml-app}** → **Model Management** or use the +{ref}/get-trained-models-stats.html[get trained models statistics API]. +Unless you are using the built-in `lang_ident_model_1` model, you must +ensure your model is successfully deployed. Refer to +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models[Deploy the model in your cluster]. +. The default input field name expected by your trained model is not present in +your source document. Use the **Field Map** option in your {infer} +processor to set the appropriate field name. +. There are too many requests. If you are using bulk ingest, reduce the number +of documents in the bulk request. If you are reindexing, use the `size` +parameter to decrease the number of documents processed in each batch. + +These common failure scenarios and others can be captured by adding failure +processors to your pipeline. 
For more examples, refer to +{ref}/ingest.html#handling-pipeline-failures[Handling pipeline failures]. + +[discrete] +[[further-reading]] +== Further reading + +* {blog-ref}how-to-deploy-nlp-text-embeddings-and-vector-search[How to deploy NLP: Text Embeddings and Vector Search] +* {blog-ref}how-to-deploy-nlp-named-entity-recognition-ner-example[How to deploy NLP: Named entity recognition (NER) example] +* {blog-ref}how-to-deploy-nlp-sentiment-analysis-example[How to deploy NLP: Sentiment Analysis Example] diff --git a/serverless/pages/explore-your-data-ml-nlp-inference.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-inference.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-inference.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-inference.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc new file mode 100644 index 0000000000..8436db9a89 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc @@ -0,0 +1,322 @@ += Language identification + +:description: Language identification is an NLP task and a model that enables you to determine the language of text. +:keywords: serverless, elasticsearch, tbd + +preview:[] + +{lang-ident-cap} enables you to determine the language of text. + +A {lang-ident} model is provided in your cluster, which you can use in an +{infer} processor of an ingest pipeline by using its model ID +(`lang_ident_model_1`). For an example, refer to +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[Add NLP {infer} to ingest pipelines]. + +The longer the text passed into the {lang-ident} model, the more accurately the +model can identify the language. 
It is fairly accurate on short samples (for +example, 50 character-long streams) in certain languages, but languages that are +similar to each other are harder to identify based on a short character stream. +If there is no valid text from which the identity can be inferred, the model +returns the special language code `zxx`. If you prefer to use a different +default value, you can adjust your ingest pipeline to replace `zxx` predictions +with your preferred value. + +{lang-ident-cap} takes into account Unicode boundaries when the feature set is +built. If the text has diacritical marks, then the model uses that information +for identifying the language of the text. In certain cases, the model can +detect the source language even if it is not written in the script that the +language traditionally uses. These languages are marked in the supported +languages table (see below) with the `Latn` subtag. {lang-ident-cap} supports +Unicode input. + +[discrete] +[[supported-languages]] +== Supported languages + +The table below contains the ISO codes and the English names of the languages +that {lang-ident} supports. If a language has a 2-letter `ISO 639-1` code, the +table contains that identifier. Otherwise, the 3-letter `ISO 639-2` code is +used. The `Latn` subtag indicates that the language is transliterated into Latin +script. 
+ +// lint disable + +// [cols="\<,\<,\<,\<,\<,\<"] + +|=== +| | | | | | + +| Code +| Language +| Code +| Language +| Code +| Language + +| af +| Afrikaans +| hr +| Croatian +| pa +| Punjabi + +| am +| Amharic +| ht +| Haitian +| pl +| Polish + +| ar +| Arabic +| hu +| Hungarian +| ps +| Pashto + +| az +| Azerbaijani +| hy +| Armenian +| pt +| Portuguese + +| be +| Belarusian +| id +| Indonesian +| ro +| Romanian + +| bg +| Bulgarian +| ig +| Igbo +| ru +| Russian + +| bg-Latn +| Bulgarian +| is +| Icelandic +| ru-Latn +| Russian + +| bn +| Bengali +| it +| Italian +| sd +| Sindhi + +| bs +| Bosnian +| iw +| Hebrew +| si +| Sinhala + +| ca +| Catalan +| ja +| Japanese +| sk +| Slovak + +| ceb +| Cebuano +| ja-Latn +| Japanese +| sl +| Slovenian + +| co +| Corsican +| jv +| Javanese +| sm +| Samoan + +| cs +| Czech +| ka +| Georgian +| sn +| Shona + +| cy +| Welsh +| kk +| Kazakh +| so +| Somali + +| da +| Danish +| km +| Central Khmer +| sq +| Albanian + +| de +| German +| kn +| Kannada +| sr +| Serbian + +| el +| Greek, modern +| ko +| Korean +| st +| Southern Sotho + +| el-Latn +| Greek, modern +| ku +| Kurdish +| su +| Sundanese + +| en +| English +| ky +| Kirghiz +| sv +| Swedish + +| eo +| Esperanto +| la +| Latin +| sw +| Swahili + +| es +| Spanish, Castilian +| lb +| Luxembourgish +| ta +| Tamil + +| et +| Estonian +| lo +| Lao +| te +| Telugu + +| eu +| Basque +| lt +| Lithuanian +| tg +| Tajik + +| fa +| Persian +| lv +| Latvian +| th +| Thai + +| fi +| Finnish +| mg +| Malagasy +| tr +| Turkish + +| fil +| Filipino +| mi +| Maori +| uk +| Ukrainian + +| fr +| French +| mk +| Macedonian +| ur +| Urdu + +| fy +| Western Frisian +| ml +| Malayalam +| uz +| Uzbek + +| ga +| Irish +| mn +| Mongolian +| vi +| Vietnamese + +| gd +| Gaelic +| mr +| Marathi +| xh +| Xhosa + +| gl +| Galician +| ms +| Malay +| yi +| Yiddish + +| gu +| Gujarati +| mt +| Maltese +| yo +| Yoruba + +| ha +| Hausa +| my +| Burmese +| zh +| Chinese + +| haw +| Hawaiian +| ne +| Nepali +| 
zh-Latn +| Chinese + +| hi +| Hindi +| nl +| Dutch, Flemish +| zu +| Zulu + +| hi-Latn +| Hindi +| no +| Norwegian +| +| + +| hmn +| Hmong +| ny +| Chichewa +| +| +|=== + +// lint enable + +[discrete] +[[further-reading]] +== Further reading + +* {blog-ref}multilingual-search-using-language-identification-in-elasticsearch[Multilingual search using {lang-ident} in {es}] diff --git a/serverless/pages/explore-your-data-ml-nlp-lang-ident.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-lang-ident.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc new file mode 100644 index 0000000000..ec78406ca9 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc @@ -0,0 +1,281 @@ += Compatible third party NLP models + +:description: The list of compatible third party NLP models. +:keywords: ml, reference, analyze + +preview:[] + +The {stack-ml-features} support transformer models that conform to the +standard BERT model interface and use the WordPiece tokenization algorithm. + +The current list of supported architectures is: + +* BERT +* BART +* DPR bi-encoders +* DistilBERT +* ELECTRA +* MobileBERT +* RoBERTa +* RetriBERT +* MPNet +* SentenceTransformers bi-encoders with the above transformer architectures +* XLM-RoBERTa + +In general, any trained model that has a supported architecture is deployable in +{es} by using eland. However, it is not possible to test every third party +model. The following lists are therefore provided for informational purposes +only and may not be current. Elastic makes no warranty or assurance that the +{ml-features} will continue to interoperate with these third party models in +the way described, or at all. 
+ +These models are listed by NLP task; for more information about those tasks, +refer to +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp[Overview]. + +**Models highlighted in bold** in the list below are recommended for evaluation +purposes and to get started with the Elastic {nlp} features. + +[discrete] +[[third-party-fill-mask-models]] +== Third party fill-mask models + +* https://huggingface.co/bert-base-uncased[BERT base model] +* https://huggingface.co/distilroberta-base[DistilRoBERTa base model] +* https://huggingface.co/microsoft/mpnet-base[MPNet base model] +* https://huggingface.co/roberta-large[RoBERTa large model] + +[discrete] +[[third-party-named-entity-recognition-models]] +== Third party named entity recognition models + +* https://huggingface.co/dslim/bert-base-NER[BERT base NER] +* https://huggingface.co/elastic/distilbert-base-cased-finetuned-conll03-english[**DistilBERT base cased finetuned conll03 English**] +* https://huggingface.co/philschmid/distilroberta-base-ner-conll2003[DistilRoBERTa base NER conll2003] +* https://huggingface.co/elastic/distilbert-base-uncased-finetuned-conll03-english[**DistilBERT base uncased finetuned conll03 English**] +* https://huggingface.co/HooshvareLab/distilbert-fa-zwnj-base-ner[DistilBERT fa zwnj base NER] + +[discrete] +[[third-party-question-answering-models]] +== Third party question answering models + +* https://huggingface.co/bert-large-uncased-whole-word-masking-finetuned-squad[BERT large model (uncased) whole word masking finetuned on SQuAD] +* https://huggingface.co/distilbert-base-cased-distilled-squad[DistilBERT base cased distilled SQuAD] +* https://huggingface.co/deepset/electra-base-squad2[Electra base squad2] +* https://huggingface.co/deepset/tinyroberta-squad2[TinyRoBERTa squad2] + +[discrete] +[[third-party-text-embedding-models]] +== Third party text embedding models + +Text Embedding models are designed to work with specific scoring functions +for 
calculating the similarity between the embeddings they produce.
+Examples of typical scoring functions are: `cosine`, `dot product` and
+`euclidean distance` (also known as `l2_norm`).
+
+The embeddings produced by these models should be indexed in {es} using the
+{ref}/dense-vector.html[dense vector field type]
+with an appropriate
+{ref}/dense-vector.html#dense-vector-params[similarity function] chosen for
+the model.
+
+To find similar embeddings in {es} use the efficient
+{ref}/knn-search.html#approximate-knn[Approximate k-nearest neighbor (kNN)]
+search API with a text embedding as the query vector. Approximate kNN search
+uses the similarity function defined in the dense vector field mapping
+to calculate the relevance. For the best results the function must be one of
+the suitable similarity functions for the model.
+
+Using `SentenceTransformerWrapper`:
+
+* https://huggingface.co/sentence-transformers/all-distilroberta-v1[All DistilRoBERTa v1]
+Suitable similarity functions: `dot_product`, `cosine`, `l2_norm`
+* https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2[All MiniLM L12 v2]
+Suitable similarity functions: `dot_product`, `cosine`, `l2_norm`
+* https://huggingface.co/sentence-transformers/all-mpnet-base-v2[**All MPNet base v2**]
+Suitable similarity functions: `dot_product`, `cosine`, `l2_norm`
+* https://huggingface.co/sentence-transformers/facebook-dpr-ctx_encoder-multiset-base[Facebook dpr-ctx_encoder multiset base]
+Suitable similarity functions: `dot_product`
+* https://huggingface.co/sentence-transformers/facebook-dpr-question_encoder-single-nq-base[Facebook dpr-question_encoder single nq base]
+Suitable similarity functions: `dot_product`
+* https://huggingface.co/sentence-transformers/LaBSE[LaBSE]
+Suitable similarity functions: `cosine`
+* https://huggingface.co/sentence-transformers/msmarco-distilbert-base-tas-b[msmarco DistilBERT base tas b]
+Suitable similarity functions: `dot_product`
+* 
https://huggingface.co/sentence-transformers/msmarco-MiniLM-L12-cos-v5[msmarco MiniLM L12 v5] +Suitable similarity functions: `dot_product`, `cosine`, `l2_norm` +* https://huggingface.co/sentence-transformers/paraphrase-mpnet-base-v2[paraphrase mpnet base v2] +Suitable similarity functions: `cosine` + +Using `DPREncoderWrapper`: + +* https://huggingface.co/castorini/ance-dpr-context-multi[ance dpr-context multi] +* https://huggingface.co/castorini/ance-dpr-question-multi[ance dpr-question multi] +* https://huggingface.co/castorini/bpr-nq-ctx-encoder[bpr nq-ctx-encoder] +* https://huggingface.co/castorini/bpr-nq-question-encoder[bpr nq-question-encoder] +* https://huggingface.co/facebook/dpr-ctx_encoder-single-nq-base[dpr-ctx_encoder single nq base] +* https://huggingface.co/facebook/dpr-ctx_encoder-multiset-base[dpr-ctx_encoder multiset base] +* https://huggingface.co/facebook/dpr-question_encoder-single-nq-base[dpr-question_encoder single nq base] +* https://huggingface.co/facebook/dpr-question_encoder-multiset-base[dpr-question_encoder multiset base] + +[discrete] +[[third-party-text-classification-models]] +== Third party text classification models + +* https://huggingface.co/nateraw/bert-base-uncased-emotion[BERT base uncased emotion] +* https://huggingface.co/Hate-speech-CNERG/dehatebert-mono-english[DehateBERT mono english] +* https://huggingface.co/bhadresh-savani/distilbert-base-uncased-emotion[DistilBERT base uncased emotion] +* https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english[DistilBERT base uncased finetuned SST-2] +* https://huggingface.co/ProsusAI/finbert[FinBERT] +* https://huggingface.co/cardiffnlp/twitter-roberta-base-sentiment[Twitter roBERTa base for Sentiment Analysis] + +[discrete] +[[third-party-zero-shot-text-classification-models]] +== Third party zero-shot text classification models + +* https://huggingface.co/facebook/bart-large-mnli[BART large mnli] +* 
https://huggingface.co/typeform/distilbert-base-uncased-mnli[DistilBERT base model (uncased)]
+* https://huggingface.co/valhalla/distilbart-mnli-12-6[**DistilBart MNLI**]
+* https://huggingface.co/typeform/mobilebert-uncased-mnli[MobileBERT: a Compact Task-Agnostic BERT for Resource-Limited Devices]
+* https://huggingface.co/cross-encoder/nli-distilroberta-base[NLI DistilRoBERTa base]
+* https://huggingface.co/cross-encoder/nli-roberta-base[NLI RoBERTa base]
+* https://huggingface.co/typeform/squeezebert-mnli[SqueezeBERT]
+
+[discrete]
+[[expected-model-output]]
+== Expected model output
+
+Models used for each NLP task type must output tensors of a specific format to
+be used in the Elasticsearch NLP pipelines.
+
+Here are the expected outputs for each task type.
+
+[discrete]
+[[fill-mask-expected-model-output]]
+=== Fill mask expected model output
+
+Fill mask is a specific kind of token classification; it is the base training
+task of many transformer models.
+
+For the Elastic stack's fill mask NLP task to understand the model output, it
+must have a specific format. It needs to
+be a float tensor with
+`shape(<number of sequences>, <number of tokens>, <vocab size>)`.
+
+Here is an example with a single sequence `"The capital of [MASK] is Paris"` and
+with vocabulary `["The", "capital", "of", "is", "Paris", "France", "[MASK]"]`.
+
+Should output:
+
+[source]
+----
+ [
+ [
+ [ 0, 0, 0, 0, 0, 0, 0 ], // The
+ [ 0, 0, 0, 0, 0, 0, 0 ], // capital
+ [ 0, 0, 0, 0, 0, 0, 0 ], // of
+ [ 0.01, 0.01, 0.3, 0.01, 0.2, 1.2, 0.1 ], // [MASK]
+ [ 0, 0, 0, 0, 0, 0, 0 ], // is
+ [ 0, 0, 0, 0, 0, 0, 0 ] // Paris
+ ]
+]
+----
+
+The predicted value here for `[MASK]` is `"France"` with a score of 1.2.
+
+[discrete]
+[[named-entity-recognition-expected-model-output]]
+=== Named entity recognition expected model output
+
+Named entity recognition is a specific token classification task. Each token in
+the sequence is scored related to a specific set of classification labels. 
For
+the Elastic Stack, we use Inside-Outside-Beginning (IOB) tagging. Elastic supports any NER entities
+as long as they are IOB tagged. The default values are:
+"O", "B_MISC", "I_MISC", "B_PER", "I_PER", "B_ORG", "I_ORG", "B_LOC", "I_LOC".
+
+The `"O"` entity label indicates that the current token is outside any entity.
+`"I"` indicates that the token is inside an entity.
+`"B"` indicates the beginning of an entity.
+`"MISC"` is a miscellaneous entity.
+`"LOC"` is a location.
+`"PER"` is a person.
+`"ORG"` is an organization.
+
+The response format must be a float tensor with
+`shape(<number of sequences>, <number of tokens>, <number of classification labels>)`.
+
+Here is an example with a single sequence `"Waldo is in Paris"`:
+
+[source]
+----
+ [
+ [
+// "O", "B_MISC", "I_MISC", "B_PER", "I_PER", "B_ORG", "I_ORG", "B_LOC", "I_LOC"
+ [ 0, 0, 0, 0.4, 0.5, 0, 0.1, 0, 0 ], // Waldo
+ [ 1, 0, 0, 0, 0, 0, 0, 0, 0 ], // is
+ [ 1, 0, 0, 0, 0, 0, 0, 0, 0 ], // in
+ [ 0, 0, 0, 0, 0, 0, 0, 0, 1.0 ] // Paris
+ ]
+]
+----
+
+[discrete]
+[[text-embedding-expected-model-output]]
+=== Text embedding expected model output
+
+Text embedding allows for semantic embedding of text for dense information
+retrieval.
+
+The output of the model must be the specific embedding directly without any
+additional pooling.
+
+Eland does this wrapping for the aforementioned models. But if supplying your
+own, the model must output the embedding for each inferred sequence.
+
+[discrete]
+[[text-classification-expected-model-output]]
=== Text classification expected model output
+
+With text classification (for example, in tasks like sentiment analysis), the
+entire sequence is classified. The output of the model must be a float tensor
+with `shape(<number of sequences>, <number of classes>)`. 
+ +Here is an example with two sequences for a binary classification model of +"happy" and "sad": + +[source] +---- + [ + [ +// happy, sad + [ 0, 1], // first sequence + [ 1, 0] // second sequence + ] +] +---- + +[discrete] +[[zero-shot-text-classification-expected-model-output]] +=== Zero-shot text classification expected model output + +Zero-shot text classification allows text to be classified for arbitrary labels +not necessarily part of the original training. Each sequence is combined with +the label given some hypothesis template. The model then scores each of these +combinations according to `[entailment, neutral, contradiction]`. The output of +the model must be a float tensor with +`shape(, , 3)`. + +Here is an example with a single sequence classified against 4 labels: + +[source] +---- + [ + [ +// entailment, neutral, contradiction + [ 0.5, 0.1, 0.4], // first label + [ 0, 0, 1], // second label + [ 1, 0, 0], // third label + [ 0.7, 0.2, 0.1] // fourth label + ] +] +---- diff --git a/serverless/pages/explore-your-data-ml-nlp-model-ref.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-model-ref.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.asciidoc new file mode 100644 index 0000000000..4fbd454c17 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.asciidoc @@ -0,0 +1,328 @@ += How to deploy named entity recognition + +:description: Description to be written + +preview:[] + +You can use these instructions to deploy a +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/extract-info[named entity recognition (NER)] +model in {es}, test the model, and add it to an {infer} ingest pipeline. 
The +model that is used in the example is publicly available on +https://huggingface.co/[HuggingFace]. + +[discrete] +[[requirements]] +== Requirements + +To follow along with the process on this page, you must have: + +* The {subscriptions}[appropriate subscription] level or the free trial period +activated. +* https://docs.docker.com/get-docker/[Docker] installed. + +[discrete] +[[deploy-a-ner-model]] +== Deploy a NER model + +You can use the {eland-docs}[Eland client] to install the {nlp} model. +Eland commands can be run in Docker. First, you need to clone the Eland +repository then create a Docker image of Eland: + +[source,shell] +---- +git clone git@github.com:elastic/eland.git +cd eland +docker build -t elastic/eland . +---- + +After the script finishes, your Eland Docker client is ready to use. + +Select a NER model from the +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/model-reference[third-party model reference list]. +This example uses an +https://huggingface.co/elastic/distilbert-base-uncased-finetuned-conll03-english[uncased NER model]. + +Install the model by running the `eland_import_hub_model` command in the Docker +image: + +[source,shell] +---- +docker run -it --rm elastic/eland \ + eland_import_hub_model \ + --cloud-id $CLOUD_ID \ + -u <username> -p <password> \ + --hub-model-id elastic/distilbert-base-uncased-finetuned-conll03-english \ + --task-type ner \ + --start + +---- + +You need to provide an administrator username and its password and replace the +`$CLOUD_ID` with the ID of your Cloud deployment. This Cloud ID can be copied +from the deployments page on your Cloud website. + +Since the `--start` option is used at the end of the Eland import command, +{es} deploys the model ready to use. If you have multiple models and want to +select which model to deploy, you can use the **Model Management** page to +manage the starting and stopping of models. + +Go to the **Trained Models** page and synchronize your trained models. 
A warning +message is displayed at the top of the page that says +_"ML job and trained model synchronization required"_. Follow the link to +_"Synchronize your jobs and trained models."_ Then click **Synchronize**. You +can also wait for the automatic synchronization that occurs in every hour, or +use the {kibana-ref}/ml-sync.html[sync {ml} objects API]. + +[discrete] +[[test-the-ner-model]] +== Test the NER model + +Deployed models can be evaluated on the **Trained Models** page by selecting the +**Test model** action for the respective model. + +[role="screenshot"] +image::images/ml-nlp-ner-test.png[Test trained model UI] + +.**Test the model by using the _infer API** +[%collapsible] +===== +You can also evaluate your models by using the +{ref}/infer-trained-model-deployment.html[_infer API]. In the following +request, `text_field` is the field name where the model expects to find the +input, as defined in the model configuration. By default, if the model was +uploaded via Eland, the input field is `text_field`. + +[source,js] +---- +POST _ml/trained_models/elastic__distilbert-base-uncased-finetuned-conll03-english/_infer +{ + "docs": [ + { + "text_field": "Elastic is headquartered in Mountain View, California." 
+ } + ] +} +---- + +The API returns a response similar to the following: + +[source,js] +---- +{ + "inference_results": [ + { + "predicted_value": "[Elastic](ORG&Elastic) is headquartered in [Mountain View](LOC&Mountain+View), [California](LOC&California).", + "entities": [ + { + "entity": "elastic", + "class_name": "ORG", + "class_probability": 0.9958921231805256, + "start_pos": 0, + "end_pos": 7 + }, + { + "entity": "mountain view", + "class_name": "LOC", + "class_probability": 0.9844731508992688, + "start_pos": 28, + "end_pos": 41 + }, + { + "entity": "california", + "class_name": "LOC", + "class_probability": 0.9972361009811214, + "start_pos": 43, + "end_pos": 53 + } + ] + } + ] +} +---- + +// NOTCONSOLE +===== + +Using the example text "Elastic is headquartered in Mountain View, California.", +the model finds three entities: an organization "Elastic", and two locations +"Mountain View" and "California". + +[discrete] +[[add-the-ner-model-to-an-infer-ingest-pipeline]] +== Add the NER model to an {infer} ingest pipeline + +You can perform bulk {infer} on documents as they are ingested by using an +{ref}/inference-processor.html[{infer} processor] in your ingest pipeline. +The novel _Les Misérables_ by Victor Hugo is used as an example for {infer} in +the following example. +https://github.com/elastic/stack-docs/blob/8.5/docs/en/stack/ml/nlp/data/les-miserables-nd.json[Download] +the novel text split by paragraph as a JSON file, then upload it by using the +{kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer]. +Give the new index the name `les-miserables` when uploading the file. 
+ +Now create an ingest pipeline either in the +**Pipeline** page or by using the API: + +[source,js] +---- +PUT _ingest/pipeline/ner +{ + "description": "NER pipeline", + "processors": [ + { + "inference": { + "model_id": "elastic__distilbert-base-uncased-finetuned-conll03-english", + "target_field": "ml.ner", + "field_map": { + "paragraph": "text_field" + } + } + }, + { + "script": { + "lang": "painless", + "if": "return ctx['ml']['ner'].containsKey('entities')", + "source": "Map tags = new HashMap(); for (item in ctx['ml']['ner']['entities']) { if (!tags.containsKey(item.class_name)) tags[item.class_name] = new HashSet(); tags[item.class_name].add(item.entity);} ctx['tags'] = tags;" + } + } + ], + "on_failure": [ + { + "set": { + "description": "Index document to 'failed-'", + "field": "_index", + "value": "failed-{{{ _index }}}" + } + }, + { + "set": { + "description": "Set error message", + "field": "ingest.failure", + "value": "{{_ingest.on_failure_message}}" + } + } + ] +} +---- + +The `field_map` object of the `inference` processor maps the `paragraph` field +in the _Les Misérables_ documents to `text_field` (the name of the +field the model is configured to use). The `target_field` is the name of the +field to write the inference results to. + +The `script` processor pulls out the entities and groups them by type. The end +result is lists of people, locations, and organizations detected in the input +text. This painless script enables you to build visualizations from the fields +that are created. + +The purpose of the `on_failure` clause is to record errors. It sets the `_index` +meta field to a new value, and the document is now stored there. It also sets a +new field `ingest.failure` and the error message is written to this field. +{infer-cap} can fail for a number of easily fixable reasons. Perhaps the model +has not been deployed, or the input field is missing in some of the source +documents. 
By redirecting the failed documents to another index and setting the +error message, those failed inferences are not lost and can be reviewed later. +When the errors are fixed, reindex from the failed index to recover the +unsuccessful requests. + +Ingest the text of the novel - the index `les-miserables` - through the pipeline +you created: + +[source,js] +---- +POST _reindex +{ + "source": { + "index": "les-miserables", + "size": 50 <1> + }, + "dest": { + "index": "les-miserables-infer", + "pipeline": "ner" + } +} +---- + +<1> The default batch size for reindexing is 1000. Reducing `size` to a +smaller number makes the update of the reindexing process quicker which enables +you to follow the progress closely and detect errors early. + +Take a random paragraph from the source document as an example: + +[source,js] +---- +{ + "paragraph": "Father Gillenormand did not do it intentionally, but inattention to proper names was an aristocratic habit of his.", + "line": 12700 +} +---- + +After the text is ingested through the NER pipeline, find the resulting document +stored in {es}: + +[source,js] +---- +GET /les-miserables-infer/_search +{ + "query": { + "term": { + "line": 12700 + } + } +} +---- + +The request returns the document marked up with one identified person: + +[source,js] +---- +(...) +"paragraph": "Father Gillenormand did not do it intentionally, but inattention to proper names was an aristocratic habit of his.", + "@timestamp": "2020-01-01T17:38:25.000+01:00", + "line": 12700, + "ml": { + "ner": { + "predicted_value": "Father [Gillenormand](PER&Gillenormand) did not do it intentionally, but inattention to proper names was an aristocratic habit of his.", + "entities": [ + { + "entity": "gillenormand", + "class_name": "PER", + "class_probability": 0.9452480789333386, + "start_pos": 7, + "end_pos": 19 + } + ], + "model_id": "elastic__distilbert-base-uncased-finetuned-conll03-english" + } + }, + "tags": { + "PER": [ + "gillenormand" + ] + } +(...) 
+---- + +[discrete] +[[visualize-results]] +== Visualize results + +You can create a tag cloud to visualize your data processed by the {infer} +pipeline. A tag cloud is a visualization that scales words by the frequency at +which they occur. It is a handy tool for viewing the entities found in the data. + +Open **Index management** → **{data-sources-cap}**, and create a new +{data-source} from the `les-miserables-infer` index pattern. + +Open **Dashboard** and create a new dashboard. Select the +**Aggregation based-type → Tag cloud** visualization. Choose the new +{data-source} as the source. + +Add a new bucket with a term aggregation, select the `tags.PER.keyword` field, +and increase the size to 20. + +Optionally, adjust the time selector to cover the data points in the +{data-source} if you selected a time field when creating it. + +Update and save the visualization. + diff --git a/serverless/pages/explore-your-data-ml-nlp-ner-example.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-ner-example.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc new file mode 100644 index 0000000000..07ca6c61b0 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc @@ -0,0 +1,12 @@ += Elastic trained models + +:description: Models trained and provided by Elastic +:keywords: serverless, elasticsearch, tbd + +preview:[] + +You can use models that are trained and provided by Elastic that are available +within the {stack} with a click of a button. 
+ +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/elastic-models/elser[ELSER – Elastic Learned Sparse EncodeR] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/ootb-models/lang-ident[Language identification] diff --git a/serverless/pages/explore-your-data-ml-nlp-ootb-models.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-ootb-models.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.asciidoc new file mode 100644 index 0000000000..5f5cdeead6 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.asciidoc @@ -0,0 +1,99 @@ += Search and compare text + +:description: NLP tasks for generating embeddings which can be used to search in text or compare different pieces of text. +:keywords: serverless, elasticsearch, tbd + +preview:[] + +The {stack-ml-features} can generate embeddings, which you can use to search in +unstructured text or compare different pieces of text. + +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/search-compare-text[Text embedding] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/search-compare-text[Text similarity] + +[discrete] +[[text-embedding]] +== Text embedding + +Text embedding is a task which produces a mathematical representation of text +called an embedding. The {ml} model turns the text into an array of numerical +values (also known as a _vector_). Pieces of content with similar meaning have +similar representations. 
This means it is possible to determine whether +different pieces of text are either semantically similar, different, or even +opposite by using a mathematical similarity function. + +This task is responsible for producing only the embedding. When the +embedding is created, it can be stored in a dense_vector field and used at +search time. For example, you can use these vectors in a +k-nearest neighbor (kNN) search to achieve semantic search capabilities. + +The following is an example of producing a text embedding: + +[source,js] +---- +{ + docs: [{"text_field": "The quick brown fox jumps over the lazy dog."}] +} +... +---- + +// NOTCONSOLE + +The task returns the following result: + +[source,js] +---- +... +{ + "predicted_value": [0.293478, -0.23845, ..., 1.34589e2, 0.119376] + ... +} +... +---- + +// NOTCONSOLE + +[discrete] +[[text-similarity]] +== Text similarity + +The text similarity task estimates how similar two pieces of text are to each +other and expresses the similarity in a numeric value. This is commonly referred +to as cross-encoding. This task is useful for ranking document text when +comparing it to another provided text input. + +You can provide multiple strings of text to compare to another text input +sequence. Each string is compared to the given text sequence at inference time +and a prediction of similarity is calculated for every string of text. + +[source,js] +---- +{ + "docs":[{ "text_field": "Berlin has a population of 3,520,031 registered inhabitants in an area of 891.82 square kilometers."}, {"text_field": "New York City is famous for the Metropolitan Museum of Art."}], + "inference_config": { + "text_similarity": { + "text": "How many people live in Berlin?" + } + } +} +---- + +// NOTCONSOLE + +In the example above, every string in the `docs` array is compared individually +to the text provided in the `text_similarity`.`text` field and a predicted +similarity is calculated for both as the API response shows: + +[source,js] +---- +... 
+{ + "predicted_value": 7.235751628875732 +}, +{ + "predicted_value": -11.562295913696289 +} +... +---- + +// NOTCONSOLE diff --git a/serverless/pages/explore-your-data-ml-nlp-search-compare.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-search-compare.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc new file mode 100644 index 0000000000..6e34448682 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc @@ -0,0 +1,24 @@ += Select a trained model + +:keywords: serverless, elasticsearch, tbd + +preview:[] + +Per the +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp[Overview], +there are multiple ways that you can use NLP features within the {stack}. +After you determine which type of NLP task you want to perform, you must choose +an appropriate trained model. + +The simplest method is to use a model that has already been fine-tuned for the +type of analysis that you want to perform. For example, there are models and +data sets available for specific NLP tasks on +https://huggingface.co/models[Hugging Face]. These instructions assume you're +using one of those models and do not describe how to create new models. For the +current list of supported model architectures, refer to +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/model-reference[Compatible third party NLP models]. + +If you choose to perform {lang-ident} by using the `lang_ident_model_1` that is +provided in the cluster, no further steps are required to import or deploy the +model. 
You can skip to using the model in +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[ingestion pipelines]. diff --git a/serverless/pages/explore-your-data-ml-nlp-select-model.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-select-model.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-select-model.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc new file mode 100644 index 0000000000..cde6a09341 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc @@ -0,0 +1,67 @@ += Try it out + +:description: You can import trained models into your cluster and configure them for specific NLP tasks. +:keywords: serverless, elasticsearch, tbd + +preview:[] + +When the model is deployed on at least one node in the cluster, you can begin to +perform inference. _{infer-cap}_ is a {ml} feature that enables you to use +your trained models to perform NLP tasks (such as text extraction, +classification, or embeddings) on incoming data. + +The simplest method to test your model against new data is to use the +**Test model** action in {kib}. You can either provide some input text or use a +field of an existing index in your cluster to test the model: + +[role="screenshot"] +image::images/ml-nlp-test-ner.png[Testing a sentence with two named entities against a NER trained model in the {ml} app] + +Alternatively, you can use the +{ref}/infer-trained-model.html[infer trained model API]. 
+For example, to try a named entity recognition task, provide some sample text: + +[source,console] +---- +POST /_ml/trained_models/elastic__distilbert-base-cased-finetuned-conll03-english/_infer +{ + "docs":[{"text_field": "Sasha bought 300 shares of Acme Corp in 2022."}] +} +---- + +// TEST[skip:TBD] + +In this example, the response contains the annotated text output and the +recognized entities: + +[source,console-result] +---- +{ + "inference_results" : [ + { + "predicted_value" : "[Sasha](PER&Sasha) bought 300 shares of [Acme Corp](ORG&Acme+Corp) in 2022.", + "entities" : [ + { + "entity" : "Sasha", + "class_name" : "PER", + "class_probability" : 0.9953193407987492, + "start_pos" : 0, + "end_pos" : 5 + }, + { + "entity" : "Acme Corp", + "class_name" : "ORG", + "class_probability" : 0.9996392198381716, + "start_pos" : 27, + "end_pos" : 36 + } + ] + } + ] +} +---- + +// NOTCONSOLE + +If you are satisfied with the results, you can add these NLP tasks in your +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[ingestion pipelines]. 
diff --git a/serverless/pages/explore-your-data-ml-nlp-test-inference.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-test-inference.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.asciidoc new file mode 100644 index 0000000000..e202e9c4d3 --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.asciidoc @@ -0,0 +1,353 @@ += How to deploy a text embedding model and use it for semantic search + +:description: Description to be written + +preview:[] + +You can use these instructions to deploy a +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/search-compare-text[text embedding] +model in {es}, test the model, and add it to an {infer} ingest pipeline. It +enables you to generate vector representations of text and perform vector +similarity search on the generated vectors. The model that is used in the +example is publicly available on https://huggingface.co/[HuggingFace]. + +The example uses a public data set from the +https://microsoft.github.io/msmarco/#ranking[MS MARCO Passage Ranking Task]. It +consists of real questions from the Microsoft Bing search engine and human +generated answers for them. The example works with a sample of this data set, +uses a model to produce text embeddings, and then runs vector search on it. + +[discrete] +[[requirements]] +== Requirements + +To follow along the process on this page, you must have: + +* The {subscriptions}[appropriate subscription] level or the free trial period +activated. +* https://docs.docker.com/get-docker/[Docker] installed. 
+ +[discrete] +[[deploy-a-text-embedding-model]] +== Deploy a text embedding model + +You can use the {eland-docs}[Eland client] to install the {nlp} model. +Eland commands can be run in Docker. First, you need to clone the Eland +repository then create a Docker image of Eland: + +[source,shell] +---- +git clone git@github.com:elastic/eland.git +cd eland +docker build -t elastic/eland . +---- + +After the script finishes, your Eland Docker client is ready to use. + +Select a text embedding model from the +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/model-reference[third-party model reference list]. +This example uses the +https://huggingface.co/sentence-transformers/msmarco-MiniLM-L-12-v3[msmarco-MiniLM-L-12-v3] +sentence-transformer model. + +Install the model by running the `eland_import_hub_model` command in the Docker +image: + +[source,shell] +---- +docker run -it --rm elastic/eland \ + eland_import_hub_model \ + --cloud-id $CLOUD_ID \ + -u <username> -p <password> \ + --hub-model-id sentence-transformers/msmarco-MiniLM-L-12-v3 \ + --task-type text_embedding \ + --start +---- + +You need to provide an administrator username and password and replace the +`$CLOUD_ID` with the ID of your Cloud deployment. This Cloud ID can be copied +from the deployments page on your Cloud website. + +Since the `--start` option is used at the end of the Eland import command, +{es} deploys the model ready to use. If you have multiple models and want to +select which model to deploy, you can use the **Model Management** page to +manage the starting and stopping of models. + +Go to the **Trained Models** page and synchronize your trained models. A warning +message is displayed at the top of the page that says +_"ML job and trained model synchronization required"_. Follow the link to +_"Synchronize your jobs and trained models."_ Then click **Synchronize**. 
You +can also wait for the automatic synchronization that occurs in every hour, or +use the {kibana-ref}/ml-sync.html[sync {ml} objects API]. + +[discrete] +[[test-the-text-embedding-model]] +== Test the text embedding model + +Deployed models can be evaluated on the **Trained Models** page by selecting the +**Test model** action for the respective model. + +[role="screenshot"] +image::images/ml-nlp-text-emb-test.png[Test trained model UI] + +.**Test the model by using the _infer API** +[%collapsible] +===== +You can also evaluate your models by using the +{ref}/infer-trained-model-deployment.html[_infer API]. In the following +request, `text_field` is the field name where the model expects to find the +input, as defined in the model configuration. By default, if the model was +uploaded via Eland, the input field is `text_field`. + +[source,js] +---- +POST /_ml/trained_models/sentence-transformers__msmarco-minilm-l-12-v3/_infer +{ + "docs": { + "text_field": "How is the weather in Jamaica?" + } +} +---- + +The API returns a response similar to the following: + +[source,js] +---- +{ + "inference_results": [ + { + "predicted_value": [ + 0.39521875977516174, + -0.3263707458972931, + 0.26809820532798767, + 0.30127981305122375, + 0.502890408039093, + ... + ] + } + ] +} +---- + +// NOTCONSOLE +===== + +The result is the predicted dense vector transformed from the example text. + +[discrete] +[[load-data]] +== Load data + +In this step, you load the data that you later use in an ingest pipeline to get +the embeddings. + +The data set `msmarco-passagetest2019-top1000` is a subset of the MS MARCO +Passage Ranking data set used in the testing stage of the 2019 TREC Deep +Learning Track. It contains 200 queries and for each query a list of relevant +text passages extracted by a simple information retrieval (IR) system. 
From that +data set, all unique passages with their IDs have been extracted and put into a +https://github.com/elastic/stack-docs/blob/8.5/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file], +totaling 182469 passages. In the following, this file is used as the example +data set. + +Upload the file by using the +{kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer]. +Name the first column `id` and the second one `text`. The index name is +`collection`. After the upload is done, you can see an index named `collection` +with 182469 documents. + +[role="screenshot"] +image::images/ml-nlp-text-emb-data.png[Importing the data] + +[discrete] +[[add-the-text-embedding-model-to-an-infer-ingest-pipeline]] +== Add the text embedding model to an {infer} ingest pipeline + +Process the initial data with an +{ref}/inference-processor.html[{infer} processor]. It adds an embedding for each +passage. For this, create a text embedding ingest pipeline and then reindex the +initial data with this pipeline. + +Now create an ingest pipeline either in the +{ml-docs}/ml-nlp-inference.html#ml-nlp-inference-processor[{stack-manage-app} UI] +or by using the API: + +[source,js] +---- +PUT _ingest/pipeline/text-embeddings +{ + "description": "Text embedding pipeline", + "processors": [ + { + "inference": { + "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", + "target_field": "text_embedding", + "field_map": { + "text": "text_field" + } + } + } + ], + "on_failure": [ + { + "set": { + "description": "Index document to 'failed-'", + "field": "_index", + "value": "failed-{{{_index}}}" + } + }, + { + "set": { + "description": "Set error message", + "field": "ingest.failure", + "value": "{{_ingest.on_failure_message}}" + } + } + ] +} +---- + +The passages are in a field named `text`. The `field_map` maps the text to the +field `text_field` that the model expects. The `on_failure` handler is set to +index failures into a different index. 
+ +Before ingesting the data through the pipeline, create the mappings of the +destination index, in particular for the field `text_embedding.predicted_value` +where the ingest processor stores the embeddings. The `dense_vector` field must +be configured with the same number of dimensions (`dims`) as the text embedding +produced by the model. That value can be found in the `embedding_size` option in +the model configuration either under the Trained Models page or in the response +body of the {ref}/get-trained-models.html[Get trained models API] call. The +msmarco-MiniLM-L-12-v3 model has embedding_size of 384, so `dims` is set to 384. + +[source,js] +---- +PUT collection-with-embeddings +{ + "mappings": { + "properties": { + "text_embedding.predicted_value": { + "type": "dense_vector", + "dims": 384, + "index": true, + "similarity": "cosine" + }, + "text": { + "type": "text" + } + } + } +} +---- + +Create the text embeddings by reindexing the data to the +`collection-with-embeddings` index through the {infer} pipeline. The {infer} +ingest processor inserts the embedding vector into each document. + +[source,js] +---- +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "collection", + "size": 50 <1> + }, + "dest": { + "index": "collection-with-embeddings", + "pipeline": "text-embeddings" + } +} +---- + +<1> The default batch size for reindexing is 1000. Reducing `size` to a +smaller number makes the update of the reindexing process quicker which enables +you to follow the progress closely and detect errors early. + +The API call returns a task ID that can be used to monitor the progress: + +[source,js] +---- +GET _tasks/ +---- + +You can also open the model stat UI to follow the progress. + +[role="screenshot"] +image::images/ml-nlp-text-emb-reindex.png[Model status UI] + +After the reindexing is finished, the documents in the new index contain the +{infer} results – the vector embeddings. 
+ +[discrete] +[[semantic-search]] +== Semantic search + +After the dataset has been enriched with vector embeddings, you can query the +data using {ref}/knn-search.html[semantic search]. Pass a +`query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and +provide the query text and the model you have used to create vector embeddings. +This example searches for "How is the weather in Jamaica?": + +[source,js] +---- +GET collection-with-embeddings/_search +{ + "knn": { + "field": "text_embedding.predicted_value", + "query_vector_builder": { + "text_embedding": { + "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", + "model_text": "How is the weather in Jamaica?" + } + }, + "k": 10, + "num_candidates": 100 + }, + "_source": [ + "id", + "text" + ] +} +---- + +As a result, you receive the top 10 documents that are closest in meaning to the +query from the `collection-with-embeddings` index sorted by their proximity to +the query: + +[source,js] +---- +"hits" : [ + { + "_index" : "collection-with-embeddings", + "_id" : "47TPtn8BjSkJO8zzKq_o", + "_score" : 0.94591534, + "_source" : { + "id" : 434125, + "text" : "The climate in Jamaica is tropical and humid with warm to hot temperatures all year round. The average temperature in Jamaica is between 80 and 90 degrees Fahrenheit. Jamaican nights are considerably cooler than the days, and the mountain areas are cooler than the lower land throughout the year. Continue Reading." + } + }, + { + "_index" : "collection-with-embeddings", + "_id" : "3LTPtn8BjSkJO8zzKJO1", + "_score" : 0.94536424, + "_source" : { + "id" : 4498474, + "text" : "The climate in Jamaica is tropical and humid with warm to hot temperatures all year round. The average temperature in Jamaica is between 80 and 90 degrees Fahrenheit. 
Jamaican nights are considerably cooler than the days, and the mountain areas are cooler than the lower land throughout the year" + } + }, + { + "_index" : "collection-with-embeddings", + "_id" : "KrXPtn8BjSkJO8zzPbDW", + "_score" : 0.9432083, + "_source" : { + "id" : 190804, + "text" : "Quick Answer. The climate in Jamaica is tropical and humid with warm to hot temperatures all year round. The average temperature in Jamaica is between 80 and 90 degrees Fahrenheit. Jamaican nights are considerably cooler than the days, and the mountain areas are cooler than the lower land throughout the year. Continue Reading" + } + }, + (...) +] +---- + +If you want to do a quick verification of the results, follow the steps of the +_Quick verification_ section of +{blog-ref}how-to-deploy-nlp-text-embeddings-and-vector-search#[this blog post]. diff --git a/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp-text-embedding-example.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc new file mode 100644 index 0000000000..1e213d13fd --- /dev/null +++ b/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc @@ -0,0 +1,33 @@ += Machine Learning - Natural Language Processing + +:keywords: serverless, elasticsearch, tbd + +preview:[] + +{nlp-cap} (NLP) refers to the way in which we can use software to understand +natural language in spoken word or written text. + +Classically, NLP was performed using linguistic rules, dictionaries, regular +expressions, and {ml} for specific tasks such as automatic categorization or +summarization of text. In recent years, however, deep learning techniques have +taken over much of the NLP landscape. 
Deep learning capitalizes on the +availability of large scale data sets, cheap computation, and techniques for +learning at scale with less human involvement. Pre-trained language models that +use a transformer architecture have been particularly successful. For example, +BERT is a pre-trained language model that was released by Google in 2018. Since +that time, it has become the inspiration for most of today’s modern NLP +techniques. The {stack} {ml} features are structured around BERT and +transformer models. These features support BERT’s tokenization scheme (called +WordPiece) and transformer models that conform to the standard BERT model +interface. For the current list of supported architectures, refer to +https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/model-reference[Compatible third party NLP models]. + +To incorporate transformer models and make predictions, {es} uses libtorch, +which is an underlying native library for PyTorch. Trained models must be in a +TorchScript representation for use with {stack} {ml} features. 
+ +You can perform the following NLP operations: + +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/extract-info[Extract information] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/classify-text[Classify text] +* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/search-compare-text[Search and compare text] diff --git a/serverless/pages/explore-your-data-ml-nlp.mdx b/serverless/pages/hidden/explore-your-data-ml-nlp.mdx similarity index 100% rename from serverless/pages/explore-your-data-ml-nlp.mdx rename to serverless/pages/hidden/explore-your-data-ml-nlp.mdx diff --git a/serverless/pages/project-settings/action-connectors.asciidoc b/serverless/pages/project-settings/action-connectors.asciidoc new file mode 100644 index 0000000000..d122d32f6c --- /dev/null +++ b/serverless/pages/project-settings/action-connectors.asciidoc @@ -0,0 +1,341 @@ +[[-serverless-action-connectors]] += {connectors-app} + +:description: Configure connections to third party systems for use in cases and rules. +:keywords: serverless + +preview:[] + +This content applies to: + +The list of available connectors varies by project type. + + + +//// +/* Connectors provide a central place to store connection information for services and integrations with third party systems. +Actions are instantiations of a connector that are linked to rules and run as background tasks on the {kib} server when rule conditions are met. 
*/ +//// + +//// +/* {kib} provides the following types of connectors for use with {alert-features} : + +- [D3 Security]{(kibana-ref}/d3security-action-type.html) +- [Email]{(kibana-ref}/email-action-type.html) +- [Generative AI]{(kibana-ref}/gen-ai-action-type.html) +- [IBM Resilient]{(kibana-ref}/resilient-action-type.html) +- [Index]{(kibana-ref}/index-action-type.html) +- [Jira]{(kibana-ref}/jira-action-type.html) +- [Microsoft Teams]{(kibana-ref}/teams-action-type.html) +- [Opsgenie]{(kibana-ref}/opsgenie-action-type.html) +- [PagerDuty]{(kibana-ref}/pagerduty-action-type.html) +- [ServerLog]{(kibana-ref}/server-log-action-type.html) +- [ServiceNow ITSM]{(kibana-ref}/servicenow-action-type.html) +- [ServiceNow SecOps]{(kibana-ref}/servicenow-sir-action-type.html) +- [ServiceNow ITOM]{(kibana-ref}/servicenow-itom-action-type.html) +- [Slack]{(kibana-ref}/slack-action-type.html) +- [Swimlane]{(kibana-ref}/swimlane-action-type.html) +- [Tines]{(kibana-ref}/tines-action-type.html) +- [Torq]{(kibana-ref}/torq-action-type.html) +- [Webhook]{(kibana-ref}/webhook-action-type.html) +- [Webhook - Case Management]{(kibana-ref}/cases-webhook-action-type.html) +- [xMatters]{(kibana-ref}/xmatters-action-type.html) */ +//// + +// [cols="2"] + +//// +/* | | | +|---|---| +| Email | Send email from your server. | +| {ibm-r} | Create an incident in {ibm-r}. | +| Index | Index data into Elasticsearch. | +| Jira | Create an incident in Jira. | +| Microsoft Teams | Send a message to a Microsoft Teams channel. | +| Opsgenie | Create or close an alert in Opsgenie. | +| PagerDuty | Send an event in PagerDuty. | +| ServerLog | Add a message to a Kibana log. | +| {sn-itsm} | Create an incident in {sn}. | +| {sn-sir} | Create a security incident in {sn}. | +| {sn-itom} | Create an event in {sn}. | +| Slack | Send a message to a Slack channel or user. | +| {swimlane} | Create an incident in {swimlane}. | +| Tines | Send events to a Tines Story. 
| +| {webhook} | Send a request to a web service. | +| {webhook-cm} | Send a request to a Case Management web service. | +| xMatters | Send actionable alerts to on-call xMatters resources. | +| Torq | +| Generative AI | +| D3 Security | */ +//// + +//// +/* + +Some connector types are paid commercial features, while others are free. +For a comparison of the Elastic subscription levels, go to +[the subscription page]{(subscriptions}). + + */ +//// + +//// +/* +## Managing connectors + +Rules use connectors to route actions to different destinations like log files, ticketing systems, and messaging tools. While each {kib} app can offer their own types of rules, they typically share connectors. **{stack-manage-app} → {connectors-ui}** offers a central place to view and manage all the connectors in the current space. + +![Example connector listing in the {rules-ui} UI](../images/action-types/management-connectors-connector-listing.png) */ +//// + +// NOTE: This is an autogenerated screenshot. Do not edit it directly. + +//// +/* +## Required permissions + +Access to connectors is granted based on your privileges to alerting-enabled +features. For more information, go to Security. + +## Connector networking configuration + +Use the action configuration settings to customize connector networking configurations, such as proxies, certificates, or TLS settings. You can set configurations that apply to all your connectors or use `xpack.actions.customHostSettings` to set per-host configurations. + +## Connector list + +In **{stack-manage-app} → {connectors-ui}**, you can find a list of the connectors +in the current space. You can use the search bar to find specific connectors by +name and type. The **Type** dropdown also enables you to filter to a subset of +connector types. + +![Filtering the connector list by types of connectors](../images/action-types/management-connectors-connector-filter-by-type.png) */ +//// + +// NOTE: This is an autogenerated screenshot. 
Do not edit it directly. + +//// +/* +You can delete individual connectors using the trash icon. Alternatively, select +multiple connectors and delete them in bulk using the **Delete** button. + +![Deleting connectors individually or in bulk](../images/action-types/management-connectors-connector-delete.png) */ +//// + +// NOTE: This is an autogenerated screenshot. Do not edit it directly. + +//// +/* + + +You can delete a connector even if there are still actions referencing it. +When this happens the action will fail to run and errors appear in the {kib} logs. + + + +## Creating a new connector + +New connectors can be created with the **Create connector** button, which guides +you to select the type of connector and configure its properties. + +![Connector select type](../images/action-types/management-connectors-connector-select-type.png) + +After you create a connector, it is available for use any time you set up an +action in the current space. + +For out-of-the-box and standardized connectors, refer to +preconfigured connectors. + + +You can also manage connectors as resources with the [Elasticstack provider](https://registry.terraform.io/providers/elastic/elasticstack/latest) for Terraform. +For more details, refer to the [elasticstack_kibana_action_connector](https://registry.terraform.io/providers/elastic/elasticstack/latest/docs/resources/kibana_action_connector) resource. + + +## Importing and exporting connectors + +To import and export connectors, use the +Saved Objects Management UI. + + + +If a connector is missing sensitive information after the import, a **Fix** +button appears in **{connectors-ui}**. + +![Connectors with missing secrets](../images/action-types/management-connectors-connectors-with-missing-secrets.png) + +## Monitoring connectors + +The Task Manager health API helps you understand the performance of all tasks in your environment. +However, if connectors fail to run, they will report as successful to Task Manager. 
The failure stats will not +accurately depict the performance of connectors. + +For more information on connector successes and failures, refer to the Event log index. + +The include that was here is another page */ +//// diff --git a/serverless/pages/action-connectors.mdx b/serverless/pages/project-settings/action-connectors.mdx similarity index 100% rename from serverless/pages/action-connectors.mdx rename to serverless/pages/project-settings/action-connectors.mdx diff --git a/serverless/pages/project-settings/api-keys.asciidoc b/serverless/pages/project-settings/api-keys.asciidoc new file mode 100644 index 0000000000..e6f27cee95 --- /dev/null +++ b/serverless/pages/project-settings/api-keys.asciidoc @@ -0,0 +1,119 @@ +[[-serverless-api-keys]] += {api-keys-app} + +:description: API keys allow access to the {stack} on behalf of a user. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +API keys are security mechanisms used to authenticate and authorize access to {stack} resources, +and ensure that only authorized users or applications are able to interact with the {stack}. + +For example, if you extract data from an {es} cluster on a daily basis, you might create an API key tied to your credentials, configure it with minimum access, and then put the API credentials into a cron job. +Or, you might create API keys to automate ingestion of new data from remote sources, without a live user interaction. + +You can manage your keys in **{project-settings} → {manage-app} → {api-keys-app}**: + +[role="screenshot"] +image::images/api-key-management.png["API keys UI"] + +// TBD: This image was refreshed but should be automated + +A _personal API key_ allows external services to access the {stack} on behalf of a user. + +// Cross-Cluster API key: allows remote clusters to connect to your local cluster. + +A _managed API key_ is created and managed by {kib} to correctly run background tasks. + +// TBD (accurate?) 
Secondary credentials have the same or lower access rights. + +//// +/* ## Security privileges + +You must have the `manage_security`, `manage_api_key`, or the `manage_own_api_key` +cluster privileges to use API keys in Elastic. API keys can also be seen in a readonly view with access to the page and the `read_security` cluster privilege. To manage roles, open the main menu, then click +**Management → Custom Roles**, or use the Role Management API. */ +//// + +[discrete] +[[-serverless-api-keys-create-an-api-key]] +== Create an API key + +In **{api-keys-app}**, click **Create API key**: + +[role="screenshot"] +image::images/create-personal-api-key.png["Create API key UI"] + +Once created, you can copy the encoded API key and use it to send requests to the {es} HTTP API. For example: + +[source,bash] +---- +curl "${ES_URL}" \ +-H "Authorization: ApiKey ${API_KEY}" +---- + +[IMPORTANT] +==== +API keys are intended for programmatic access. Don't use API keys to +authenticate access using a web browser. +==== + +[discrete] +[[-serverless-api-keys-restrict-privileges]] +=== Restrict privileges + +When you create or update an API key, use **Restrict privileges** to limit the permissions. Define the permissions using a JSON `role_descriptors` object, where you specify one or more roles and the associated privileges. + +For example, the following `role_descriptors` object defines a `books-read-only` role that limits the API key to `read` privileges on the `books` index. + +[source,json] +---- +{ + "books-read-only": { + "cluster": [], + "indices": [ + { + "names": ["books"], + "privileges": ["read"] + } + ], + "applications": [], + "run_as": [], + "metadata": {}, + "transient_metadata": { + "enabled": true + } + } +} +---- + +For the `role_descriptors` object schema, check out the {ref}/security-api-create-api-key.html#security-api-create-api-key-request-body[`/_security/api_key` endpoint] docs. 
For supported privileges, check {ref}/security-privileges.html#privileges-list-indices[Security privileges]. + +[discrete] +[[-serverless-api-keys-update-an-api-key]] +== Update an API key + +In **{api-keys-app}**, click on the name of the key. +You can update only **Restrict privileges** and **Include metadata**. + +// TBD: Refer to the update API key documentation to learn more about updating personal API keys. + +[discrete] +[[-serverless-api-keys-view-and-delete-api-keys]] +== View and delete API keys + +The **{api-keys-app}** app lists your API keys, including the name, date created, and status. +When API keys expire, the status changes from `Active` to `Expired`. + +//// +/* +TBD: RBAC requirements for serverless? +If you have `manage_security` or `manage_api_key` permissions, +you can view the API keys of all users, and see which API key was +created by which user in which realm. +If you have only the `manage_own_api_key` permission, you see only a list of your own keys. */ +//// + +You can delete API keys individually or in bulk. diff --git a/serverless/pages/api-keys.mdx b/serverless/pages/project-settings/api-keys.mdx similarity index 100% rename from serverless/pages/api-keys.mdx rename to serverless/pages/project-settings/api-keys.mdx diff --git a/serverless/pages/project-settings/custom-roles.asciidoc b/serverless/pages/project-settings/custom-roles.asciidoc new file mode 100644 index 0000000000..6bc9fb507e --- /dev/null +++ b/serverless/pages/project-settings/custom-roles.asciidoc @@ -0,0 +1,114 @@ +[[-serverless-custom-roles]] += Custom roles + +:description: Create and manage roles that grant privileges within your project. 
+:keywords: serverless, Elasticsearch, Security + +ifeval::["{serverlessCustomRoles}" == "false"] +coming:[Coming soon] +endif::[] + +ifeval::["{serverlessCustomRoles}" == "true"] +preview:[] + +This content applies to: + +The built-in <> and <> are great for getting started with {serverless-full}, and for system administrators who do not need more restrictive access. + +As an administrator, however, you have the ability to create your own roles to describe exactly the kind of access your users should have within a specific project. +For example, you might create a marketing_user role, which you then assign to all users in your marketing department. +This role would grant access to all of the necessary data and features for this team to be successful, without granting them access they don't require. + +// Derived from https://www.elastic.co/guide/en/kibana/current/tutorial-secure-access-to-kibana.html + +All custom roles grant the same access as the `Viewer` instance access role with regards to {ecloud} privileges. +To grant more {ecloud} privileges, assign more roles. +Users receive a union of all their roles' privileges. + +You can manage custom roles in **{project-settings} → {manage-app} → {custom-roles-app}**. +To create a new custom role, click the **Create role** button. +To clone, delete, or edit a role, open the actions menu: + +[role="screenshot"] +image::images/custom-roles-ui.png[Custom Roles app] + +// TO-DO: This screenshot needs to be refreshed and automated. + +Roles are a collection of privileges that enable users to access project features and data. +For example, when you create a custom role, you can assign {es} cluster and index privileges and {kib} privileges. + +[NOTE] +==== +You cannot assign {ref}/security-privileges.html#_run_as_privilege[run as privileges] in {serverless-full} custom roles. 
+==== + +[discrete] +[[-serverless-custom-roles-es-cluster-privileges]] +== {es} cluster privileges + +Cluster privileges grant access to monitoring and management features in {es}. +They also enable some {stack-manage-app} capabilities in your project. + +[role="screenshot"] +image::images/custom-roles-cluster-privileges.png[Create a custom role and define {es} cluster privileges] + +// TO-DO: This screenshot needs to be refreshed and automated. + +Refer to {ref}/security-privileges.html#privileges-list-cluster[cluster privileges] for a complete description of available options. + +[discrete] +[[-serverless-custom-roles-es-index-privileges]] +== {es} index privileges + +Each role can grant access to multiple data indices, and each index can have a different set of privileges. +Typically, you will grant the `read` and `view_index_metadata` privileges to each index that you expect your users to work with. +For example, grant access to indices that match an `acme-marketing-*` pattern: + +[role="screenshot"] +image::images/custom-roles-index-privileges.png[Create a custom role and define {es} index privileges] + +// TO-DO: This screenshot needs to be refreshed and automated. + +Refer to {ref}/security-privileges.html#privileges-list-indices[index privileges] for a complete description of available options. + +Document-level and field-level security affords you even more granularity when it comes to granting access to your data. +With document-level security (DLS), you can write an {es} query to describe which documents this role grants access to. +With field-level security (FLS), you can instruct {es} to grant or deny access to specific fields within each document. + +// Derived from https://www.elastic.co/guide/en/kibana/current/kibana-role-management.html#adding_cluster_privileges + +[discrete] +[[-serverless-custom-roles-kib-privileges]] +== {kib} privileges + +When you create a custom role, click **Add Kibana privilege** to grant access to specific features. 
+The features that are available vary depending on the project type. +For example, in {es-serverless}: + +[role="screenshot"] +image::images/custom-roles-kibana-privileges.png[Create a custom role and define {kib} privileges] + +// TO-DO: This screenshot needs to be refreshed and automated. + +Open the **Spaces** selection control to specify whether to grant the role access to all spaces or one or more individual spaces. +When using the **Customize by feature** option, you can choose either **All**, **Read** or **None** for access to each feature. + +All:: +Grants full read-write access. + +Read:: +Grants read-only access. + +None:: +Does not grant any access. + +Some features have finer access control and you can optionally enable sub-feature privileges. + +.New features +[NOTE] +==== +As new features are added to {serverless-full}, roles that use the custom option do not automatically get access to the new features. You must manually update the roles. +==== + +After your roles are set up, the next step to securing access is to assign roles to your users. +Click the **Assign roles** link to go to the **Members** tab of the **Organization** page. +Learn more in <>. +endif::[] diff --git a/serverless/pages/custom-roles.mdx b/serverless/pages/project-settings/custom-roles.mdx similarity index 100% rename from serverless/pages/custom-roles.mdx rename to serverless/pages/project-settings/custom-roles.mdx diff --git a/serverless/pages/project-settings/data-views.asciidoc b/serverless/pages/project-settings/data-views.asciidoc new file mode 100644 index 0000000000..204d69a72d --- /dev/null +++ b/serverless/pages/project-settings/data-views.asciidoc @@ -0,0 +1,178 @@ +[[-serverless-data-views]] += {data-sources-cap} + +:description: Elastic requires a {data-source} to access the {es} data that you want to explore. 
+:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +A {data-source} can point to one or more indices, {ref}/data-streams.html[data streams], or {ref}/alias.html[index aliases]. +For example, a {data-source} can point to your log data from yesterday or all indices that contain your data. + +//// +/* + +## Required permissions + +* Access to **Data Views** requires the {kib} privilege + `Data View Management`. + +* To create a {data-source}, you must have the {es} privilege + `view_index_metadata`. + +* If a read-only indicator appears, you have insufficient privileges + to create or save {data-sources}. In addition, the buttons to create {data-sources} or + save existing {data-sources} are not visible. For more information, + refer to Granting access to {kib}. +*/ +//// + +[discrete] +[[-serverless-data-views-create-a-data-view]] +== Create a data view + +After you've loaded your data, follow these steps to create a {data-source}: + +// + +. Open **Discover** then open the data view menu. ++ +[role="screenshot"] +image:images/discover-find-data-view.png[How to set the {data-source} in Discover] ++ +Alternatively, go to **{project-settings} → {manage-app} → {data-views-app}**. +. Click **Create a {data-source}**. +. Give your {data-source} a name. +. Start typing in the **Index pattern** field, and Elastic looks for the names of +indices, data streams, and aliases that match your input. You can +view all available sources or only the sources that the data view targets. +[role="screenshot"] +image::images/discover-create-data-view.png["Create data view"] ++ +** To match multiple sources, use a wildcard (*). `filebeat-*` matches +`filebeat-apache-a`, `filebeat-apache-b`, and so on. +** To match multiple single sources, enter their names, +separated by a comma. Do not include a space after the comma. +`filebeat-a,filebeat-b` matches two indices. +** To exclude a source, use a minus sign (-), for example, `-test3`. 
+. Open the **Timestamp field** dropdown, +and then select the default field for filtering your data by time. ++ +** If you don't set a default time field, you can't use +global time filters on your dashboards. This is useful if +you have multiple time fields and want to create dashboards that combine visualizations +based on different timestamps. +** If your index doesn't have time-based data, choose **I don't want to use the time filter**. +. Click **Show advanced settings** to: ++ +** Display hidden and system indices. +** Specify your own {data-source} name. For example, enter your {es} index alias name. +. Click **Save {data-source} to Elastic**. + +You can manage your data views in **{project-settings} → {manage-app} → {data-views-app}**. + +[discrete] +[[-serverless-data-views-create-a-temporary-data-source]] +=== Create a temporary {data-source} + +Want to explore your data or create a visualization without saving it as a data view? +Select **Use without saving** in the **Create {data-source}** form in **Discover**. +With a temporary {data-source}, you can add fields and create an {es} query alert, just like you would a regular {data-source}. +Your work won't be visible to others in your space. + +A temporary {data-source} remains in your space until you change apps, or until you save it. + +// ![how to create an ad-hoc data view](https://images.contentstack.io/v3/assets/bltefdd0b53724fa2ce/blte3a4f3994c44c0cc/637eb0c95834861044c21a25/ad-hoc-data-view.gif) + +.Temporary {data-sources} are not available in the {data-views-app} app. +[NOTE] +==== + +==== + +//// +/* + +### Use {data-sources} with rolled up data + +A {data-source} can match one rollup index. For a combination rollup +{data-source} with both raw and rolled up data, use the standard notation: + +```ts +rollup_logstash,kibana_sample_data_logs +``` +For an example, refer to Create and visualize rolled up data. 
*/ +//// + +//// +/* + +### Use {data-sources} with {ccs} + +If your {es} clusters are configured for [{ccs}]{(ref}/modules-cross-cluster-search.html), +you can create a {data-source} to search across the clusters of your choosing. +Specify data streams, indices, and aliases in a remote cluster using the +following syntax: + +```ts +: +``` + +To query {ls} indices across two {es} clusters +that you set up for {ccs}, named `cluster_one` and `cluster_two`: + +```ts +cluster_one:logstash-*,cluster_two:logstash-* +``` + +Use wildcards in your cluster names +to match any number of clusters. To search {ls} indices across +clusters named `cluster_foo`, `cluster_bar`, and so on: + +```ts +cluster_*:logstash-* +``` + +To query across all {es} clusters that have been configured for {ccs}, +use a standalone wildcard for your cluster name: + +```ts +*:logstash-* +``` + +To match indices starting with `logstash-`, but exclude those starting with `logstash-old`, from +all clusters having a name starting with `cluster_`: + +```ts +`cluster_*:logstash-*,cluster_*:-logstash-old*` +``` + +To exclude a cluster having a name starting with `cluster_`: + +```ts +`cluster_*:logstash-*,cluster_one:-*` +``` + +Once you configure a {data-source} to use the {ccs} syntax, all searches and +aggregations using that {data-source} in Elastic take advantage of {ccs}. */ +//// + +[discrete] +[[-serverless-data-views-delete-a-data-source]] +== Delete a {data-source} + +When you delete a {data-source}, you cannot recover the associated field formatters, runtime fields, source filters, +and field popularity data. +Deleting a {data-source} does not remove any indices or data documents from {es}. + +.Deleting a {data-source} breaks all visualizations, saved searches, and other saved objects that reference the data view. +[IMPORTANT] +==== + +==== + +. Go to **{project-settings} → {manage-app} → {data-views-app}**. +. 
Find the {data-source} that you want to delete, and then +click image:images/icons/trash.svg[Delete] in the **Actions** column. diff --git a/serverless/pages/data-views.mdx b/serverless/pages/project-settings/data-views.mdx similarity index 100% rename from serverless/pages/data-views.mdx rename to serverless/pages/project-settings/data-views.mdx diff --git a/serverless/pages/project-settings/files.asciidoc b/serverless/pages/project-settings/files.asciidoc new file mode 100644 index 0000000000..36ea44ea5d --- /dev/null +++ b/serverless/pages/project-settings/files.asciidoc @@ -0,0 +1,16 @@ +[[-serverless-files]] += {files-app} + +:description: Manage files that are stored in Elastic. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +Several {serverless-full} features let you upload files. For example, you can add files to https://www.elastic.co/docs/current/serverless/observability/cases[cases] or upload a logo to an **Image** panel in a https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-dashboards[dashboard]. + +You can access these uploaded files in **{project-settings} → {manage-app} → {files-app}**. 
+ +[role="screenshot"] +image::images/file-management.png["Files UI"] diff --git a/serverless/pages/files.mdx b/serverless/pages/project-settings/files.mdx similarity index 100% rename from serverless/pages/files.mdx rename to serverless/pages/project-settings/files.mdx diff --git a/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc b/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc new file mode 100644 index 0000000000..111c1e3ac6 --- /dev/null +++ b/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc @@ -0,0 +1,18 @@ +[[-serverless-fleet-and-elastic-agent]] += Fleet and Elastic Agent + +:description: Centrally manage your Elastic Agents in Fleet +:keywords: serverless, ingest, fleet, elastic agent + +preview:[] + +This content applies to: + +{agent} is a single, unified way to add monitoring for logs, metrics, and other types of data to a host. +It can also protect hosts from security threats, query data from operating systems, forward data from remote services or hardware, and more. +A single agent makes it easier and faster to deploy monitoring across your infrastructure. +Each agent has a single policy you can update to add integrations for new data sources, security protections, and more. + +{fleet} provides a web-based UI to centrally manage your {agents} and their policies. + +To learn more, refer to the {fleet-guide}[Fleet and Elastic Agent documentation]. 
diff --git a/serverless/pages/fleet-and-elastic-agent.mdx b/serverless/pages/project-settings/fleet-and-elastic-agent.mdx similarity index 100% rename from serverless/pages/fleet-and-elastic-agent.mdx rename to serverless/pages/project-settings/fleet-and-elastic-agent.mdx diff --git a/serverless/pages/project-settings/index-management.asciidoc b/serverless/pages/project-settings/index-management.asciidoc new file mode 100644 index 0000000000..67ca8f5e03 --- /dev/null +++ b/serverless/pages/project-settings/index-management.asciidoc @@ -0,0 +1,281 @@ +[[-serverless-index-management]] += Index management + +:description: Perform CRUD operations on indices and data streams. View index settings, mappings, and statistics. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +Elastic's index management features are an easy, convenient way to manage your cluster's indices, data streams, index templates, and enrich policies. +Practicing good index management ensures your data is stored correctly and in the most cost-effective way possible. + +//// +/* data streams , and index +templates. */ +//// + +//// +/* +## What you'll learn + +You'll learn how to: + +* View and edit index settings. +* View mappings and statistics for an index. +* Perform index-level operations, such as refreshes. +* View and manage data streams. +* Create index templates to automatically configure new data streams and indices. + +TBD: Are these RBAC requirements valid for serverless? + +## Required permissions + +If you use {es} {security-features}, the following security privileges are required: + +* The `monitor` cluster privilege to access Elastic's **{index-manage-app}** features. +* The `view_index_metadata` and `manage` index privileges to view a data stream + or index's data. + +* The `manage_index_templates` cluster privilege to manage index templates. + +To add these privileges, go to **Management → Custom Roles**. 
+ +*/ +//// + +[discrete] +[[-serverless-index-management-manage-indices]] +== Manage indices + +Go to **{project-settings} → {manage-app} → {index-manage-app}**: + +[role="screenshot"] +image::images/index-management-indices.png[{index-manage-app} UI] + +// TO-DO: This screenshot needs to be refreshed since it doesn't show all of the pertinent tabs + +The **{index-manage-app}** page contains an overview of your indices. + +//// +/* +TBD: Do these badges exist in serverless? +Badges indicate if an index is a follower index, a +rollup index, or frozen. Clicking a badge narrows the list to only indices of that type. */ +//// + +* To show details or perform operations, such as delete, click the index name. To perform operations +on multiple indices, select their checkboxes and then open the **Manage** menu. +* To filter the list of indices, use the search bar. +* To drill down into the index mappings, settings, and statistics, click an index name. From this view, you can navigate to **Discover** to further explore the documents in the index. + +// settings, mapping + +// ![{index-manage-app} UI](../images/index-management_index_details.png) + +// TO-DO: This screenshot needs to be refreshed since it doesn't show the appropriate context + +[discrete] +[[-serverless-index-management-manage-data-streams]] +== Manage data streams + +Investigate your data streams and address lifecycle management needs in the **Data Streams** view. + +The value in the **Indices** column indicates the number of backing indices. Click this number to drill down into details. + +A value in the data retention column indicates that the data stream is managed by a data stream lifecycle policy. + +This value is the time period for which your data is guaranteed to be stored. Data older than this period can be deleted by +{es} at a later time. 
+ +[role="screenshot"] +image::images/management-data-stream.png[Data stream details] + +To view information about the stream's backing indices, click the number in the **Indices** column. + +* To view more information about a data stream, such as its generation or its +current index lifecycle policy, click the stream's name. From this view, you can navigate to **Discover** to +further explore data within the data stream. +* preview:[] To edit the data retention value, open the **Manage** menu, and then click **Edit data retention**. + +//// +/* +TO-DO: This screenshot is not accurate since it contains several toggles that don't exist in serverless. +![Backing index](../images/index-mgmt-management_index_data_stream_backing_index.png) +*/ +//// + +[discrete] +[[-serverless-index-management-manage-index-templates]] +== Manage index templates + +Create, edit, clone, and delete your index templates in the **Index Templates** view. Changes made to an index template do not affect existing indices. + +[role="screenshot"] +image::images/index-management-index-templates.png[Index templates] + +// TO-DO: This screenshot is missing some tabs that exist in serverless + +If you don't have any templates, you can create one using the **Create template** wizard. + +//// +/* +TO-DO: This walkthrough needs to be tested and updated for serverless. +### Try it: Create an index template + +In this tutorial, you'll create an index template and use it to configure two +new indices. + +**Step 1. Add a name and index pattern** + +1. In the **Index Templates** view, open the **Create template** wizard. + + ![Create wizard](../images/index-mgmt/-index-mgmt-management_index_create_wizard.png) + +1. In the **Name** field, enter `my-index-template`. + +1. Set **Index pattern** to `my-index-*` so the template matches any index + with that index pattern. + +1. Leave **Data Stream**, **Priority**, **Version**, and **_meta field** blank or as-is. + +**Step 2. 
Add settings, mappings, and aliases** + +1. Add component templates to your index template. + + Component templates are pre-configured sets of mappings, index settings, and + aliases you can reuse across multiple index templates. Badges indicate + whether a component template contains mappings (*M*), index settings (*S*), + aliases (*A*), or a combination of the three. + + Component templates are optional. For this tutorial, do not add any component + templates. + + ![Component templates page](../images/index-mgmt/-index-mgmt-management_index_component_template.png) + +1. Define index settings. These are optional. For this tutorial, leave this + section blank. + +1. Define a mapping that contains an object field named `geo` with a + child `geo_point` field named `coordinates`: + + ![Mapped fields page](../images/index-mgmt/-index-mgmt-management-index-templates-mappings.png) + + Alternatively, you can click the **Load JSON** link and define the mapping as JSON: + + ```js + { + "properties": { + "geo": { + "properties": { + "coordinates": { + "type": "geo_point" + } + } + } + } + + ``` + \\ NOTCONSOLE + + You can create additional mapping configurations in the **Dynamic templates** and + **Advanced options** tabs. For this tutorial, do not create any additional + mappings. + +1. Define an alias named `my-index`: + + ```js + { + "my-index": {} + } + ``` + \\ NOTCONSOLE + +1. On the review page, check the summary. If everything looks right, click + **Create template**. + +**Step 3. Create new indices** + +You’re now ready to create new indices using your index template. + +1. Index the following documents to create two indices: + `my-index-000001` and `my-index-000002`. 
+ + ```console + POST /my-index-000001/_doc + { + "@timestamp": "2019-05-18T15:57:27.541Z", + "ip": "225.44.217.191", + "extension": "jpg", + "response": "200", + "geo": { + "coordinates": { + "lat": 38.53146222, + "lon": -121.7864906 + } + }, + "url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/charles-fullerton.jpg" + } + + POST /my-index-000002/_doc + { + "@timestamp": "2019-05-20T03:44:20.844Z", + "ip": "198.247.165.49", + "extension": "php", + "response": "200", + "geo": { + "coordinates": { + "lat": 37.13189556, + "lon": -76.4929875 + } + }, + "memory": 241720, + "url": "https://theacademyofperformingartsandscience.org/people/type:astronauts/name:laurel-b-clark/profile" + } + ``` + +1. Use the get index API to view the configurations for the + new indices. The indices were configured using the index template you created + earlier. + + ```console + GET /my-index-000001,my-index-000002 + ``` + \\ TEST[continued] +*/ +//// + +//// +/* +TO-DO:This page is missing information about the "Component templates" tab. +*/ +//// + +[discrete] +[[-serverless-index-management-manage-enrich-policies]] +== Manage enrich policies + +Use the **Enrich Policies** view to add data from your existing indices to incoming documents during ingest. +An {ref}/ingest-enriching-data.html[enrich policy] contains: + +* The policy type that determines how the policy matches the enrich data to incoming documents +* The source indices that store enrich data as documents +* The fields from the source indices used to match incoming documents +* The enrich fields containing enrich data from the source indices that you want to add to incoming documents +* An optional query. + +[role="screenshot"] +image::images/management-enrich-policies.png[Enrich policies] + +When creating an enrich policy, the UI walks you through the configuration setup and selecting the fields. +Before you can use the policy with an enrich processor, you must execute the policy. 
+ +When executed, an enrich policy uses enrich data from the policy's source indices +to create a streamlined system index called the enrich index. The policy uses this index to match and enrich incoming documents. + +Check out these examples: + +* {ref}/geo-match-enrich-policy-type.html[Example: Enrich your data based on geolocation] +* {ref}/match-enrich-policy-type.html[Example: Enrich your data based on exact values] +* {ref}/range-enrich-policy-type.html[Example: Enrich your data by matching a value to a range] diff --git a/serverless/pages/index-management.mdx b/serverless/pages/project-settings/index-management.mdx similarity index 100% rename from serverless/pages/index-management.mdx rename to serverless/pages/project-settings/index-management.mdx diff --git a/serverless/pages/project-settings/ingest-pipelines.asciidoc b/serverless/pages/project-settings/ingest-pipelines.asciidoc new file mode 100644 index 0000000000..0129961e9f --- /dev/null +++ b/serverless/pages/project-settings/ingest-pipelines.asciidoc @@ -0,0 +1,58 @@ +[[-serverless-ingest-pipelines]] += {ingest-pipelines-cap} + +:description: Create and manage {ingest-pipelines} to perform common transformations and enrichments on your data. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +{ref}/ingest.html[{ingest-pipelines-cap}] let you perform common transformations on your data before indexing. +For example, you can use pipelines to remove fields, extract values from text, and enrich your data. + +A pipeline consists of a series of configurable tasks called processors. +Each processor runs sequentially, making specific changes to incoming documents. +After the processors have run, {es} adds the transformed documents to your data stream or index. + +//// +/* +TBD: Do these requirements apply in serverless? +## Prerequisites + +- Nodes with the ingest node role handle pipeline processing. 
To use ingest pipelines, your cluster must have at least one node with the ingest role. For heavy ingest loads, we recommend creating dedicated ingest nodes. +- If the Elasticsearch security features are enabled, you must have the manage_pipeline cluster privilege to manage ingest pipelines. To use Kibana’s Ingest Pipelines feature, you also need the cluster:monitor/nodes/info cluster privileges. +- Pipelines including the enrich processor require additional setup. See Enrich your data. +*/ +//// + +[discrete] +[[-serverless-ingest-pipelines-create-and-manage-pipelines]] +== Create and manage pipelines + +In **{project-settings} → {manage-app} → {ingest-pipelines-app}**, you can: + +* View a list of your pipelines and drill down into details +* Edit or clone existing pipelines +* Delete pipelines + +[role="screenshot"] +image::images/ingest-pipelines-management.png["{ingest-pipelines-app}"] + +To create a pipeline, click **Create pipeline → New pipeline**. +For an example tutorial, see {ref}/common-log-format-example.html[Example: Parse logs]. + +The **New pipeline from CSV** option lets you use a file with comma-separated values (CSV) to create an ingest pipeline that maps custom data to the Elastic Common Schema (ECS). +Mapping your custom data to ECS makes the data easier to search and lets you reuse visualizations from other data sets. +To get started, check {ecs-ref}/ecs-converting.html[Map custom data to ECS]. + +[discrete] +[[-serverless-ingest-pipelines-test-pipelines]] +== Test pipelines + +Before you use a pipeline in production, you should test it using sample documents. +When creating or editing a pipeline in **{ingest-pipelines-app}**, click **Add documents**. 
+In the **Documents** tab, provide sample documents and click **Run the pipeline**: + +[role="screenshot"] +image::images/ingest-pipelines-test.png["Test a pipeline in {ingest-pipelines-app}"] diff --git a/serverless/pages/ingest-pipelines.mdx b/serverless/pages/project-settings/ingest-pipelines.mdx similarity index 100% rename from serverless/pages/ingest-pipelines.mdx rename to serverless/pages/project-settings/ingest-pipelines.mdx diff --git a/serverless/pages/project-settings/integrations.asciidoc b/serverless/pages/project-settings/integrations.asciidoc new file mode 100644 index 0000000000..b8b061f31f --- /dev/null +++ b/serverless/pages/project-settings/integrations.asciidoc @@ -0,0 +1,16 @@ +[[-serverless-integrations]] += Integrations + +:description: Use our pre-built integrations to connect your data to Elastic. +:keywords: serverless, ingest, integration + +preview:[] + +This content applies to: + +Elastic integrations are a streamlined way to connect your data to Elastic. +Integrations are available for popular services and platforms, like Nginx, AWS, and MongoDB, +as well as many generic input types like log files. + +Integration documentation is available in the product when you install an integration, +or you can explore our https://www.elastic.co/docs/current/integrations[Elastic integrations documentation]. diff --git a/serverless/pages/integrations.mdx b/serverless/pages/project-settings/integrations.mdx similarity index 100% rename from serverless/pages/integrations.mdx rename to serverless/pages/project-settings/integrations.mdx diff --git a/serverless/pages/project-settings/logstash-pipelines.asciidoc b/serverless/pages/project-settings/logstash-pipelines.asciidoc new file mode 100644 index 0000000000..bc22e44b02 --- /dev/null +++ b/serverless/pages/project-settings/logstash-pipelines.asciidoc @@ -0,0 +1,78 @@ +[[-serverless-logstash-pipelines]] += {ls-pipelines} + +:description: Create, edit, and delete your {ls} pipeline configurations. 
+:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +In **{project-settings} → {manage-app} → {ls-pipelines-app}**, you can control multiple {ls} instances and pipeline configurations. + +[role="screenshot"] +image::images/logstash-pipelines-management.png["{ls-pipelines-app}"] + +On the {ls} side, you must enable configuration management and register {ls} to use the centrally managed pipeline configurations. + +[IMPORTANT] +==== +After you configure {ls} to use centralized pipeline management, you can no longer specify local pipeline configurations. +The `pipelines.yml` file and settings such as `path.config` and `config.string` are inactive when centralized pipeline management is enabled. +==== + +[discrete] +[[-serverless-logstash-pipelines-manage-pipelines]] +== Manage pipelines + +//// +/* +TBD: What is the appropriate RBAC for serverless? +If {kib} is protected with basic authentication, make sure your {kib} user has +the `logstash_admin` role as well as the `logstash_writer` role that you created +when you configured Logstash to use basic authentication. Additionally, +in order to view (as read-only) non-centrally-managed pipelines in the pipeline management +UI, make sure your {kib} user has the `monitoring_user` role as well. +*/ +//// + +. {logstash-ref}/configuring-centralized-pipelines.html[Configure centralized pipeline management]. +. To add a new pipeline, go to **{project-settings} → {manage-app} → {ls-pipelines-app}** and click **Create pipeline**. Provide the following details, then click **Create and deploy**. + +Pipeline ID:: +A name that uniquely identifies the pipeline. +This is the ID that you used when you configured centralized pipeline management and specified a list of pipeline IDs in the `xpack.management.pipeline.id` setting. + +Description:: +A description of the pipeline configuration. This information is for your use. + +Pipeline:: +The pipeline configuration. 
+You can treat the editor like any other editor. +You don't have to worry about whitespace or indentation. + +Pipeline workers:: +The number of parallel workers used to run the filter and output stages of the pipeline. + +Pipeline batch size:: +The maximum number of events an individual worker thread collects before +executing filters and outputs. + +Pipeline batch delay:: +Time in milliseconds to wait for each event before sending an undersized +batch to pipeline workers. + +Queue type:: +The internal queueing model for event buffering. +Options are `memory` for in-memory queueing and `persisted` for disk-based acknowledged queueing. + +Queue max bytes:: +The total capacity of the queue when persistent queues are enabled. + +Queue checkpoint writes:: +The maximum number of events written before a checkpoint is forced when +persistent queues are enabled. + +To delete one or more pipelines, select their checkboxes then click **Delete**. + +For more information about pipeline behavior, go to {logstash-ref}/logstash-centralized-pipeline-management.html#_pipeline_behavior[Centralized Pipeline Management]. diff --git a/serverless/pages/logstash-pipelines.mdx b/serverless/pages/project-settings/logstash-pipelines.mdx similarity index 100% rename from serverless/pages/logstash-pipelines.mdx rename to serverless/pages/project-settings/logstash-pipelines.mdx diff --git a/serverless/pages/project-settings/machine-learning.asciidoc b/serverless/pages/project-settings/machine-learning.asciidoc new file mode 100644 index 0000000000..d51fbe9dfc --- /dev/null +++ b/serverless/pages/project-settings/machine-learning.asciidoc @@ -0,0 +1,49 @@ +[[-serverless-machine-learning]] += {ml-cap} + +:description: View, export, and import {ml} jobs and models. 
+:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +To view your {ml} resources, go to **{project-settings} → {manage-app} → {ml-app}**: + +[role="screenshot"] +image::images/ml-security-management.png["Anomaly detection job management"] + +// TO-DO: This screenshot should be automated. + +The {ml-features} that are available vary by project type: + +* {es-serverless} projects have trained models. +* {observability} projects have {anomaly-jobs}. +* {security} projects have {anomaly-jobs}, {dfanalytics-jobs}, and trained models. + +For more information, go to {ml-docs}/ml-ad-overview.html[{anomaly-detect-cap}], {ml-docs}/ml-dfanalytics.html[{dfanalytics-cap}] and {ml-docs}/ml-nlp.html[Natural language processing]. + +[discrete] +[[-serverless-machine-learning-synchronize-saved-objects]] +== Synchronize saved objects + +Before you can view your {ml} {dfeeds}, jobs, and trained models in {kib}, they must have saved objects. +For example, if you used APIs to create your jobs, wait for automatic synchronization or go to the **{ml-app}** page and click **Synchronize saved objects**. + +[discrete] +[[-serverless-machine-learning-export-and-import-jobs]] +== Export and import jobs + +You can export and import your {ml} job and {dfeed} configuration details on the **{ml-app}** page. +For example, you can export jobs from your test environment and import them in your production environment. + +The exported file contains configuration details; it does not contain the {ml} models. +For {anomaly-detect}, you must import and run the job to build a model that is accurate for the new environment. +For {dfanalytics}, trained models are portable; you can import the job then transfer the model to the new cluster. +Refer to {ml-docs}/ml-trained-models.html#export-import[Exporting and importing {dfanalytics} trained models]. 
+ +There are some additional actions that you must take before you can successfully import and run your jobs: + +* The {data-sources} that are used by {anomaly-detect} {dfeeds} and {dfanalytics} source indices must exist; otherwise, the import fails. +* If your {anomaly-jobs} use custom rules with filter lists, the filter lists must exist; otherwise, the import fails. +* If your {anomaly-jobs} were associated with calendars, you must create the calendar in the new environment and add your imported jobs to the calendar. diff --git a/serverless/pages/machine-learning.mdx b/serverless/pages/project-settings/machine-learning.mdx similarity index 100% rename from serverless/pages/machine-learning.mdx rename to serverless/pages/project-settings/machine-learning.mdx diff --git a/serverless/pages/project-settings/maintenance-windows.asciidoc b/serverless/pages/project-settings/maintenance-windows.asciidoc new file mode 100644 index 0000000000..cc9cf91d0a --- /dev/null +++ b/serverless/pages/project-settings/maintenance-windows.asciidoc @@ -0,0 +1,72 @@ +[[-serverless-maintenance-windows]] += {maint-windows-cap} + +:description: Suppress rule notifications for scheduled periods of time. +:keywords: serverless, Observability, Security + +preview:[] + +This content applies to: + +preview::[] + +You can schedule single or recurring {maint-windows} to temporarily reduce rule notifications. +For example, a maintenance window prevents false alarms during planned outages. + +Alerts continue to be generated, however notifications are suppressed as follows: + +* When an alert occurs during a maintenance window, there are no notifications. +When the alert recovers, there are no notifications--even if the recovery occurs after the maintenance window ends. +* When an alert occurs before a maintenance window and recovers during or after the maintenance window, notifications are sent as usual. + +//// +/* +TBD: What RBAC requirements exist in serverless? 
+## Configure access to maintenance windows + +To use maintenance windows, you must have the appropriate [subscription]{(subscriptions}) and {kib} feature privileges. + +- To have full access to maintenance windows, you must have `All` privileges for the **Management → Maintenance Windows*** feature. +- To have view-only access to maintenance windows, you must have `Read` privileges for the **Management → Maintenance Windows* feature. + +For more details, refer to {kib} privileges. +*/ +//// + +[discrete] +[[-serverless-maintenance-windows-create-and-manage-maint-windows]] +== Create and manage {maint-windows} + +In **{project-settings} → {manage-app} → {maint-windows-app}** you can create, edit, and archive {maint-windows}. + +When you create a maintenance window, you must provide a name and a schedule. +You can optionally configure it to repeat daily, monthly, yearly, or on a custom interval. + +[role="screenshot"] +image::images/create-maintenance-window.png[The Create Maintenance Window user interface in {kib}] + +// NOTE: This is an autogenerated screenshot. Do not edit it directly. + +If you turn on **Filter alerts**, you can use KQL to filter the alerts affected by the maintenance window. +For example, you can suppress notifications for alerts from specific rules: + +[role="screenshot"] +image::images/create-maintenance-window-filter.png[The Create Maintenance Window user interface in {kib} with a filter] + +// NOTE: This is an autogenerated screenshot. Do not edit it directly. + +[NOTE] +==== +* You can select only a single category when you turn on filters. +* Some rules are not affected by maintenance window filters because their alerts do not contain requisite data. +In particular, {kibana-ref}/kibana-alerts.html[{stack-monitor-app}], {kibana-ref}/geo-alerting.html[tracking containment], {ml-docs}/ml-configuring-alerts.html[{anomaly-jobs} health], and {ref}/transform-alerts.html[transform health] rules are not affected by the filters. 
+==== + +A maintenance window can have any one of the following statuses: + +* `Upcoming`: It will run at the scheduled date and time. +* `Running`: It is running. +* `Finished`: It ended and does not have a repeat schedule. +* `Archived`: It is archived. In a future release, archived {maint-windows} will be queued for deletion. + +When you view alert details in {kib}, each alert shows unique identifiers for {maint-windows} that affected it. diff --git a/serverless/pages/maintenance-windows.mdx b/serverless/pages/project-settings/maintenance-windows.mdx similarity index 100% rename from serverless/pages/maintenance-windows.mdx rename to serverless/pages/project-settings/maintenance-windows.mdx diff --git a/serverless/pages/project-settings/maps.asciidoc b/serverless/pages/project-settings/maps.asciidoc new file mode 100644 index 0000000000..c9a5400b8e --- /dev/null +++ b/serverless/pages/project-settings/maps.asciidoc @@ -0,0 +1,98 @@ +[[-serverless-maps]] += {maps-app} + +:description: Create maps from your geographical data. +:keywords: serverless, Security + +preview:[] + +This content applies to: + +In **{project-settings} → {maps-app}** you can: + +* Build maps with multiple layers and indices. +* Animate spatial temporal data. +* Upload GeoJSON files and shapefiles. +* Embed your map in dashboards. +* Focus on only the data that's important to you. + +//// +/* +- Symbolize features using data values. +*/ +//// + +[discrete] +[[-serverless-maps-build-maps-with-multiple-layers-and-indices]] +== Build maps with multiple layers and indices + +Use multiple layers and indices to show all your data in a single map. +Show how data sits relative to physical features like weather patterns, human-made features like international borders, and business-specific features like sales regions. +Plot individual documents or use aggregations to plot any data set, no matter how large. 
+ +[role="screenshot"] +image::images/sample_data_ecommerce_map.png[A world map with country and revenue layers] + +Go to **{project-settings} → {maps-app}** and click **Add layer**. +To learn about specific types of layers, check out {kibana-ref}/heatmap-layer.html[Heat map layer], {kibana-ref}/tile-layer.html[Tile layer], and {kibana-ref}/vector-layer.html[Vector layer]. + +[discrete] +[[-serverless-maps-animate-spatial-temporal-data]] +== Animate spatial temporal data + +Data comes to life with animation. +Hard to detect patterns in static data pop out with movement. +Use time slider to animate your data and gain deeper insights. + +This animated map uses the time slider to show Portland buses over a period of 15 minutes. +The routes come alive as the bus locations update with time. + +[role="screenshot"] +image::images/timeslider_map.gif[An animated city map of Portland with changing bus locations] + +To create this type of map, check out {kibana-ref}/asset-tracking-tutorial.html[Track, visualize, and alert assets in real time]. + +[discrete] +[[-serverless-maps-upload-geojson-files-and-shapefiles]] +== Upload GeoJSON files and shapefiles + +Use **{maps-app}** to drag and drop your GeoJSON and shapefile data and then use them as layers in your map. +Check out {kibana-ref}/import-geospatial-data.html[Import geospatial data]. + +[discrete] +[[-serverless-maps-embed-your-map-in-dashboards]] +== Embed your map in dashboards + +Viewing data from different angles provides better insights. +Dimensions that are obscured in one visualization might be illuminated in another. +Add your map to a https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-dashboards[dashboard] and view your geospatial data alongside bar charts, pie charts, tag clouds, and more. + +This choropleth map shows the density of non-emergency service requests in San Diego by council district. 
+The map is embedded in a dashboard, so users can better understand when services are requested and gain insight into the top requested services. + +[role="screenshot"] +image::images/embed_dashboard_map.jpeg[A dashboard with a map, bar chart, and tag cloud] + +For a detailed example, check out {kibana-ref}/maps-getting-started.html[Build a map to compare metrics by country or region]. + +//// +/* +TBD: There doesn't seem to be content to link to for this section, so it's omitted for now. +## Symbolize features using data values + +Customize each layer to highlight meaningful dimensions in your data. +For example, use dark colors to symbolize areas with more web log traffic, and lighter colors to symbolize areas with less traffic. +*/ +//// + +[discrete] +[[-serverless-maps-focus-on-only-the-data-thats-important-to-you]] +== Focus on only the data that's important to you + +Search across the layers in your map to focus on just the data you want. +Combine free text search with field-based search using the {kib} Query Language (KQL). +Set the time filter to restrict layers by time. +Draw a polygon on the map or use the shape from features to create spatial filters. +Filter individual layers to compare facets. + +Check out {kibana-ref}/maps-search.html[Search geographic data]. diff --git a/serverless/pages/maps.mdx b/serverless/pages/project-settings/maps.mdx similarity index 100% rename from serverless/pages/maps.mdx rename to serverless/pages/project-settings/maps.mdx diff --git a/serverless/pages/project-settings/project-and-management-settings.asciidoc b/serverless/pages/project-settings/project-and-management-settings.asciidoc new file mode 100644 index 0000000000..6c2c4404b9 --- /dev/null +++ b/serverless/pages/project-settings/project-and-management-settings.asciidoc @@ -0,0 +1,24 @@ +[[-serverless-project-and-management-settings]] += Project and management settings + +:description: Learn about capabilities available in multiple serverless solutions. 
+:keywords: serverless, observability, security, elasticsearch, overview + +preview:[] + +The documentation in this section describes shared capabilities that are available in multiple solutions. +Look for the doc badge on each page to see if the page is valid for your solution: + +* for the {es} solution +* for the {observability} solution +* for the {security} solution + +[IMPORTANT] +==== +Some solutions provide versions of these capabilities tailored to your use case. +Read the main solution docs to learn how to use those capabilities: + +* https://www.elastic.co/docs/current/serverless/elasticsearch/what-is-elasticsearch-serverless[{es-serverless} docs] +* https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[{observability} serverless docs] +* https://www.elastic.co/docs/current/serverless/security/what-is-security-serverless[{security} serverless docs] +==== diff --git a/serverless/pages/project-and-management-settings.mdx b/serverless/pages/project-settings/project-and-management-settings.mdx similarity index 100% rename from serverless/pages/project-and-management-settings.mdx rename to serverless/pages/project-settings/project-and-management-settings.mdx diff --git a/serverless/pages/project-settings/project-settings.asciidoc b/serverless/pages/project-settings/project-settings.asciidoc new file mode 100644 index 0000000000..342d166b0a --- /dev/null +++ b/serverless/pages/project-settings/project-settings.asciidoc @@ -0,0 +1,90 @@ +[[-serverless-project-settings]] += Management settings + +:description: Manage your indices, data views, saved objects, settings, and more from a central location in Elastic. +:keywords: serverless, management, overview + +preview:[] + +Go to **Project Settings** to manage your indices, data views, saved objects, settings, and more. + +Access to individual features is governed by Elastic user roles. +Consult your administrator if you do not have the appropriate access. 
+To learn more about roles, refer to <>. + +|=== +| Feature | Description | Available in + +| https://www.elastic.co/docs/current/serverless/api-keys[API keys] +| Create and manage keys that can send requests on behalf of users. +| + +| https://www.elastic.co/docs/current/serverless/security/asset-criticality[Asset criticality] +| Bulk assign asset criticality to multiple entities by importing a text file. +| + +| https://www.elastic.co/docs/current/serverless/action-connectors[] +| Create and manage reusable connectors for triggering actions. +| + +| https://www.elastic.co/docs/current/serverless/data-views[] +| Manage the fields in the data views that retrieve your data from {es}. +| + +| https://www.elastic.co/docs/current/serverless/security/entity-risk-scoring[Entity Risk Score] +| Manage entity risk scoring, and preview risky entities. +| + +| https://www.elastic.co/docs/current/serverless/files[] +| Manage files that are stored in {kib}. +| + +| https://www.elastic.co/docs/current/serverless/index-management[] +| View index settings, mappings, and statistics and perform operations on indices. +| + +| https://www.elastic.co/docs/current/serverless/ingest-pipelines[] +| Create and manage ingest pipelines that parse, transform, and enrich your data. +| + +| https://www.elastic.co/docs/current/serverless/logstash-pipelines[] +| Create and manage {ls} pipelines that parse, transform, and enrich your data. +| + +| https://www.elastic.co/docs/current/serverless/machine-learning[] +| View, export, and import your {anomaly-detect} and {dfanalytics} jobs and trained models. +| + +| https://www.elastic.co/docs/current/serverless/maintenance-windows[] +| Suppress rule notifications for scheduled periods of time. +| + +| https://www.elastic.co/docs/current/serverless/maps[] +| Create maps from your geographical data. +| + +| https://www.elastic.co/docs/current/serverless/reports[] +| Manage and download reports such as CSV files generated from saved searches. 
+| + +| https://www.elastic.co/docs/current/serverless/rules[] +| Create and manage rules that generate alerts. +| + +| https://www.elastic.co/docs/current/serverless/saved-objects[] +| Copy, edit, delete, import, and export your saved objects. +These include dashboards, visualizations, maps, {data-sources}, and more. +| + +| https://www.elastic.co/docs/current/serverless/spaces[] +| Organize your project and objects into multiple spaces. +| + +| https://www.elastic.co/docs/current/serverless/tags[] +| Create, manage, and assign tags to your saved objects. +| + +| https://www.elastic.co/docs/current/serverless/transforms[] +| Use transforms to pivot existing {es} indices into summarized or entity-centric indices. +| +|=== diff --git a/serverless/pages/project-settings.mdx b/serverless/pages/project-settings/project-settings.mdx similarity index 100% rename from serverless/pages/project-settings.mdx rename to serverless/pages/project-settings/project-settings.mdx diff --git a/serverless/pages/project-settings/reports.asciidoc b/serverless/pages/project-settings/reports.asciidoc new file mode 100644 index 0000000000..8c0739b184 --- /dev/null +++ b/serverless/pages/project-settings/reports.asciidoc @@ -0,0 +1,24 @@ +[[-serverless-reports]] += {reports-app} + +:description: View and manage generated reports. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +{kib} provides you with several options to share saved searches, dashboards, and visualizations. + +For example, in **Discover**, you can create and download comma-separated values (CSV) files for saved searches. + +To view and manage reports, go to **{project-settings} → {manage-app} → {reports-app}**. + +[role="screenshot"] +image::images/reports-management.png["{reports-app}"] + +// TBD: This image was refreshed but should be automated + +You can download or view details about the report by clicking the icons in the actions menu. 
+ +To delete one or more reports, select their checkboxes then click **Delete reports**. diff --git a/serverless/pages/reports.mdx b/serverless/pages/project-settings/reports.mdx similarity index 100% rename from serverless/pages/reports.mdx rename to serverless/pages/project-settings/reports.mdx diff --git a/serverless/pages/project-settings/rules.asciidoc b/serverless/pages/project-settings/rules.asciidoc new file mode 100644 index 0000000000..d9db287a56 --- /dev/null +++ b/serverless/pages/project-settings/rules.asciidoc @@ -0,0 +1,176 @@ +[[-serverless-rules]] += {rules-app} + +:description: Alerting works by running checks on a schedule to detect conditions defined by a rule. +:keywords: serverless, Elasticsearch, alerting, learn + +preview:[] + +This content applies to: + +In general, a rule consists of three parts: + +* _Conditions_: what needs to be detected? +* _Schedule_: when/how often should detection checks run? +* _Actions_: what happens when a condition is detected? + +For example, when monitoring a set of servers, a rule might: + +* Check for average CPU usage > 0.9 on each server for the last two minutes (condition). +* Check every minute (schedule). +* Send a warning email message via SMTP with subject `CPU on {{server}} is high` (action). + +//// +/* ![Three components of a rule](../images/what-is-a-rule.svg) + +The following sections describe each part of the rule in more detail. */ +//// + +[discrete] +[[-serverless-rules-conditions]] +== Conditions + +Each project type supports a specific set of rule types. +Each _rule type_ provides its own way of defining the conditions to detect, but an expression formed by a series of clauses is a common pattern. 
+For example, in an {es} query rule, you specify an index, a query, and a threshold, which uses a metric aggregation operation (`count`, `average`, `max`, `min`, or `sum`):
+
+[role="screenshot"]
+image::images/es-query-rule-conditions.png[UI for defining rule conditions in an {es} query rule]
+
+// NOTE: This is an autogenerated screenshot. Do not edit it directly.
+
+[discrete]
+[[-serverless-rules-schedule]]
+== Schedule
+
+// Rule schedules are defined as an interval between subsequent checks, and can range from a few seconds to months.
+
+All rules must have a check interval, which defines how often to evaluate the rule conditions.
+Checks are queued; they run as close to the defined value as capacity allows.
+
+[IMPORTANT]
+====
+The intervals of rule checks in {kib} are approximate. Their timing is affected by factors such as the frequency at which tasks are claimed and the task load on the system. Refer to {kibana-ref}/alerting-production-considerations.html[Alerting production considerations].
+
+// missing link: Alerting production considerations
+====
+
+[discrete]
+[[-serverless-rules-actions]]
+== Actions
+
+You can add one or more actions to your rule to generate notifications when its conditions are met.
+Recovery actions likewise run when rule conditions are no longer met.
+
+When defining actions in a rule, you specify:
+
+* A connector
+* An action frequency
+* A mapping of rule values to properties exposed for that type of action
+
+Each action uses a connector, which provides connection information for a {kib} service or third party integration, depending on where you want to send the notifications.
+The specific list of connectors that you can use in your rule varies by project type.
+Refer to https://www.elastic.co/docs/current/serverless/action-connectors[].
+
+// If no connectors exist, click **Add connector** to create one.
+
+After you select a connector, set the _action frequency_. 
+If you want to reduce the number of notifications you receive without affecting their timeliness, some rule types support alert summaries. +For example, if you create an {es} query rule, you can set the action frequency such that you receive summaries of the new, ongoing, and recovered alerts on a custom interval: + +[role="screenshot"] +image::images/es-query-rule-action-summary.png[UI for defining rule conditions in an {es} query rule] + +// + +Alternatively, you can set the action frequency such that the action runs for each alert. +If the rule type does not support alert summaries, this is your only available option. +You must choose when the action runs (for example, at each check interval, only when the alert status changes, or at a custom action interval). +You must also choose an action group, which affects whether the action runs. +Each rule type has a specific set of valid action groups. +For example, you can set _Run when_ to `Query matched` or `Recovered` for the {es} query rule: + +[role="screenshot"] +image::images/es-query-rule-recovery-action.png[UI for defining a recovery action] + +// NOTE: This is an autogenerated screenshot. Do not edit it directly. + +Each connector supports a specific set of actions for each action group and enables different action properties. +For example, you can have actions that create an {opsgenie} alert when rule conditions are met and recovery actions that close the {opsgenie} alert. + +Some types of rules enable you to further refine the conditions under which actions run. +For example, you can specify that actions run only when an alert occurs within a specific time frame or when it matches a KQL query. + +[TIP] +==== +If you are not using alert summaries, actions are triggered per alert and a rule can end up generating a large number of actions. 
Take the following example where a rule is monitoring three servers every minute for CPU usage > 0.9, and the action frequency is `On check intervals`: + +* Minute 1: server X123 > 0.9. _One email_ is sent for server X123. +* Minute 2: X123 and Y456 > 0.9. _Two emails_ are sent, one for X123 and one for Y456. +* Minute 3: X123, Y456, Z789 > 0.9. _Three emails_ are sent, one for each of X123, Y456, Z789. + +In this example, three emails are sent for server X123 in the span of 3 minutes for the same rule. Often, it's desirable to suppress these re-notifications. If +you set the action frequency to `On custom action intervals` with an interval of 5 minutes, you reduce noise by getting emails only every 5 minutes for +servers that continue to exceed the threshold: + +* Minute 1: server X123 > 0.9. _One email_ will be sent for server X123. +* Minute 2: X123 and Y456 > 0.9. _One email_ will be sent for Y456. +* Minute 3: X123, Y456, Z789 > 0.9. _One email_ will be sent for Z789. + +To get notified only once when a server exceeds the threshold, you can set the action frequency to `On status changes`. Alternatively, if the rule type supports alert summaries, consider using them to reduce the volume of notifications. +==== + +//// +/* +Each action definition is therefore a template: all the parameters needed to invoke a service are supplied except for specific values that are only known at the time the rule condition is detected. + +In the server monitoring example, the `email` connector type is used, and `server` is mapped to the body of the email, using the template string `CPU on {{server}} is high`. + +When the rule detects the condition, it creates an alert containing the details of the condition. */ +//// + +[discrete] +[[-serverless-rules-action-variables]] +=== Action variables + +You can pass rule values to an action at the time a condition is detected. 
+To view the list of variables available for your rule, click the "add rule variable" button: + +[role="screenshot"] +image::images/es-query-rule-action-variables.png[Passing rule values to an action] + +// NOTE: This is an autogenerated screenshot. Do not edit it directly. + +For more information about common action variables, refer to {kibana-ref}/rule-action-variables.html[Rule actions variables] + +// missing link + +[discrete] +[[-serverless-rules-alerts]] +== Alerts + +When checking for a condition, a rule might identify multiple occurrences of the condition. +{kib} tracks each of these alerts separately. +Depending on the action frequency, an action occurs per alert or at the specified alert summary interval. + +Using the server monitoring example, each server with average CPU > 0.9 is tracked as an alert. +This means a separate email is sent for each server that exceeds the threshold whenever the alert status changes. + +// ![{kib} tracks each detected condition as an alert and takes action on each alert](../images/alerting.svg) + +[discrete] +[[-serverless-rules-putting-it-all-together]] +== Putting it all together + +A rule consists of conditions, actions, and a schedule. +When conditions are met, alerts are created that render actions and invoke them. +To make action setup and update easier, actions use connectors that centralize the information used to connect with {kib} services and third-party integrations. +The following example ties these concepts together: + +[role="screenshot"] +image::images/rule-concepts-summary.svg[Rules, connectors, alerts and actions work together to convert detection into action] + +. Any time a rule's conditions are met, an alert is created. This example checks for servers with average CPU > 0.9. Three servers meet the condition, so three alerts are created. +. Alerts create actions according to the action frequency, as long as they are not muted or throttled. 
When actions are created, its properties are filled with actual values. In this example, three actions are created when the threshold is met, and the template string `{{server}}` is replaced with the appropriate server name for each alert. +. {kib} runs the actions, sending notifications by using a third party integration like an email service. +. If the third party integration has connection parameters or credentials, {kib} fetches these from the appropriate connector. diff --git a/serverless/pages/rules.mdx b/serverless/pages/project-settings/rules.mdx similarity index 100% rename from serverless/pages/rules.mdx rename to serverless/pages/project-settings/rules.mdx diff --git a/serverless/pages/project-settings/saved-objects.asciidoc b/serverless/pages/project-settings/saved-objects.asciidoc new file mode 100644 index 0000000000..5254d1041d --- /dev/null +++ b/serverless/pages/project-settings/saved-objects.asciidoc @@ -0,0 +1,115 @@ +[[-serverless-saved-objects]] += Saved objects + +:description: Manage your saved objects, including dashboards, visualizations, maps, {data-sources}, and more. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +To get started, go to **{project-settings} → {manage-app} → {saved-objects-app}**: + +[role="screenshot"] +image::images/saved-object-management.png[{saved-objects-app}] + +// TO-DO: This screenshot needs to be refreshed and automated. + +//// +/* +TBD: Need serverless-specific RBAC requirements +## Required permissions + +To access **Saved Objects**, you must have the required `Saved Objects Management` {kib} privilege. + +To add the privilege, open the main menu, and then click **Management → Roles**. + + +Granting access to `Saved Objects Management` authorizes users to +manage all saved objects in {kib}, including objects that are managed by +applications they may not otherwise be authorized to access. 
+ */ +//// + +[discrete] +[[-serverless-saved-objects-view-and-delete]] +== View and delete + +* To view and edit a saved object in its associated application, click the object title. +* To show objects that use this object, so you know the impact of deleting it, click the actions icon image:images/icons/boxesHorizontal.svg[More actions] and then select **Relationships**. +* To delete one or more objects, select their checkboxes, and then click **Delete**. + +[discrete] +[[-serverless-saved-objects-import-and-export]] +== Import and export + +Use import and export to move objects between different {kib} instances. +These actions are useful when you have multiple environments for development and production. +Import and export also work well when you have a large number of objects to update and want to batch the process. + +//// +/* +TBD: Do these APIs exist for serverless? +{kib} also provides import and +export APIs to automate this process. +*/ +//// + +[discrete] +[[-serverless-saved-objects-import]] +=== Import + +Import multiple objects in a single operation. + +. In the toolbar, click **Import**. +. Select the NDJSON file that includes the objects you want to import. +. Select the import options. By default, saved objects already in {kib} are overwritten. +. Click **Import**. + +//// +/* +TBD: Are these settings configurable in serverless? + +The `savedObjects.maxImportExportSize` configuration setting +limits the number of saved objects to include in the file. The +`savedObjects.maxImportPayloadBytes` setting limits the overall +size of the file that you can import. + +*/ +//// + +[discrete] +[[-serverless-saved-objects-export]] +=== Export + +Export objects by selection or type. + +* To export specific objects, select them in the table, and then click **Export**. +* To export objects by type, click **Export objects** in the toolbar. + +{kib} creates an NDJSON with all your saved objects. 
+By default, the NDJSON includes child objects related to the saved objects. +Exported dashboards include their associated {data-sources}. + +//// +/* +TBD: Are these settings configurable in serverless? + +The `savedObjects.maxImportExportSize` configuration setting limits the number of saved objects that you can export. + */ +//// + +[discrete] +[[-serverless-saved-objects-copy-to-other-spaces]] +== Copy to other spaces + +Copy saved objects and their related objects between spaces. + +. Click the actions icon image:images/icons/boxesHorizontal.svg[Actions]. +. Click **Copy to spaces**. +. Specify whether to automatically overwrite any objects that already exist +in the target spaces, or resolve them on a per-object basis. +. Select the spaces in which to copy the object. + +The copy operation automatically includes child objects that are related to +the saved object. diff --git a/serverless/pages/saved-objects.mdx b/serverless/pages/project-settings/saved-objects.mdx similarity index 100% rename from serverless/pages/saved-objects.mdx rename to serverless/pages/project-settings/saved-objects.mdx diff --git a/serverless/pages/project-settings/spaces.asciidoc b/serverless/pages/project-settings/spaces.asciidoc new file mode 100644 index 0000000000..54be346adf --- /dev/null +++ b/serverless/pages/project-settings/spaces.asciidoc @@ -0,0 +1,67 @@ +[[-serverless-spaces]] += Spaces + +:description: Organize your project and objects into multiple spaces. + +This content applies to: + +Spaces enable you to organize your dashboards and other saved +objects into meaningful categories. Once inside a space, you see only +the dashboards and saved objects that belong to that space. + +When you create and enter a new project, you're using the default space of that project. + +You can identify the space you're in or switch to a different space from the header. 
+ +[role="screenshot"] +image::images/space-breadcrumb.png[Space breadcrumb] + +You can view and manage the spaces of a project from the **Spaces** page in **Management**. + +[discrete] +[[-serverless-spaces-required-permissions]] +== Required permissions + +You must have an admin role on the project to manage its **Spaces**. + +[discrete] +[[-serverless-spaces-create-or-edit-a-space]] +== Create or edit a space + +You can have up to 100 spaces in a project. + +. Click **Create space** or select the space you want to edit. +. Provide: ++ +** A meaningful name and description for the space. +** A URL identifier. The URL identifier is a short text string that becomes part of the {kib} URL. {kib} suggests a URL identifier based on the name of your space, but you can customize the identifier to your liking. You cannot change the space identifier later. +. Customize the avatar of the space to your liking. +. Save the space. + +{kib} also has an https://www.elastic.co/docs/api/doc/serverless/group/endpoint-spaces[API] +if you prefer to create spaces programmatically. + +ifeval::["{serverlessCustomRoles}" == "true"] +[discrete] +[[-serverless-spaces-customize-access-to-space]] +== Customize access to space + +Customizing access to a space is available for the following project types only: + +As an administrator, you can define custom roles with specific access to certain spaces and features in a project. Refer to https://www.elastic.co/docs/current/serverless/custom-roles[]. +endif::[] + +[discrete] +[[-serverless-spaces-delete-a-space]] +== Delete a space + +Deleting a space permanently removes the space and all of its contents. +Find the space on the _Spaces_ page and click the trash icon in the Actions column. + +You can't delete the default space, but you can customize it to your liking. 
+ +[discrete] +[[-serverless-spaces-move-saved-objects-between-spaces]] +== Move saved objects between spaces + +To move saved objects between spaces, you can https://www.elastic.co/docs/current/serverless/saved-objects[copy objects] or https://www.elastic.co/docs/current/serverless/saved-objects[export and import objects]. diff --git a/serverless/pages/spaces.mdx b/serverless/pages/project-settings/spaces.mdx similarity index 100% rename from serverless/pages/spaces.mdx rename to serverless/pages/project-settings/spaces.mdx diff --git a/serverless/pages/project-settings/tags.asciidoc b/serverless/pages/project-settings/tags.asciidoc new file mode 100644 index 0000000000..e418ccca65 --- /dev/null +++ b/serverless/pages/project-settings/tags.asciidoc @@ -0,0 +1,76 @@ +[[-serverless-tags]] += {tags-app} + +:description: Use tags to categorize your saved objects, then filter for related objects based on shared tags. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +To get started, go to **{project-settings} → {manage-app} → {tags-app}**: + +[role="screenshot"] +image::images/tag-management.png[Tags management] + +//// +/* +TBD: What are the serverless RBAC requirements? +## Required permissions + +To create tags, you must meet the minimum requirements. + +* Access to **Tags** requires the `Tag Management` Kibana privilege. To add the privilege, open the main menu, + and then click **Management → Custom Roles**. + +* The `read` privilege allows you to assign tags to the saved objects for which you have write permission. +* The `write` privilege enables you to create, edit, and delete tags. + + +Having the `Tag Management` {kib} privilege is not required to +view tags assigned on objects you have `read` access to, or to filter objects by tags +from the global search. + +*/ +//// + +[discrete] +[[-serverless-tags-create-a-tag]] +== Create a tag + +Create a tag to assign to your saved objects. + +. 
Click **Create tag**. +. Enter a name and select a color for the new tag. ++ +The name cannot be longer than 50 characters. +. Click **Create tag**. + +[discrete] +[[-serverless-tags-assign-a-tag-to-an-object]] +== Assign a tag to an object + +//// +/* +TBD: Do these RBAC requirements exist in serverless? +To assign and remove tags, you must have `write` permission on the objects to which you assign the tags. +*/ +//// + +. Find the tag you want to assign. +. Click the actions icon and then select **Manage assignments**. +. Select the objects to which you want to assign or remove tags. +[role="screenshot"] +image::images/tag-assignment.png[Assign tags to saved objects] +. Click **Save tag assignments**. + +[discrete] +[[-serverless-tags-delete-a-tag]] +== Delete a tag + +When you delete a tag, you remove it from all saved objects that use it. + +. Click the actions icon, and then select **Delete**. +. Click **Delete tag**. + +To assign, delete, or clear multiple tags, select them in the **Tags** view, and then select the action from the **selected tags** menu. diff --git a/serverless/pages/tags.mdx b/serverless/pages/project-settings/tags.mdx similarity index 100% rename from serverless/pages/tags.mdx rename to serverless/pages/project-settings/tags.mdx diff --git a/serverless/pages/project-settings/transforms.asciidoc b/serverless/pages/project-settings/transforms.asciidoc new file mode 100644 index 0000000000..a95f25fcf4 --- /dev/null +++ b/serverless/pages/project-settings/transforms.asciidoc @@ -0,0 +1,44 @@ +[[-serverless-transforms]] += {transforms-app} + +:description: Use transforms to pivot existing indices into summarized or entity-centric indices. +:keywords: serverless, Elasticsearch, Observability, Security + +preview:[] + +This content applies to: + +{transforms-cap} enable you to convert existing {es} indices into summarized +indices, which provide opportunities for new insights and analytics. 
+ +For example, you can use {transforms} to pivot your data into entity-centric +indices that summarize the behavior of users or sessions or other entities in +your data. Or you can use {transforms} to find the latest document among all the +documents that have a certain unique key. + +For more information, check out: + +* {ref}/transform-usage.html[When to use transforms] +* {ref}/transform-alerts.html[Generating alerts for transforms] +* {ref}/transform-scale.html[Transforms at scale] +* {ref}/transform-checkpoints.html[How checkpoints work] +* {ref}/transform-examples.html[Examples] +* {ref}/transform-painless-examples.html[Painless examples] +* {ref}/transform-troubleshooting.html[Troubleshooting transforms] +* {ref}/transform-limitations.html[Limitations] + +[discrete] +[[-serverless-transforms-create-and-manage-transforms]] +== Create and manage {transforms} + +In **{project-settings} → {manage-app} → {transforms-app}**, you can +create, edit, stop, start, reset, and delete {transforms}: + +[role="screenshot"] +image::images/transform-management.png["{transforms-app} app"] + +When you create a {transform}, you must choose between two types: _pivot_ and _latest_. +You must also decide whether you want the {transform} to run once or continuously. +For more information, go to {ref}/transform-overview.html[{transforms-cap} overview]. + +// To stop, start, or delete multiple {transforms}, select their checkboxes then click.... 
diff --git a/serverless/pages/transforms.mdx b/serverless/pages/project-settings/transforms.mdx similarity index 100% rename from serverless/pages/transforms.mdx rename to serverless/pages/project-settings/transforms.mdx diff --git a/serverless/pages/visualize-library.asciidoc b/serverless/pages/visualize-library.asciidoc new file mode 100644 index 0000000000..cca0403649 --- /dev/null +++ b/serverless/pages/visualize-library.asciidoc @@ -0,0 +1,28 @@ +[[-serverless-visualize-library]] += Visualize Library + +:keywords: serverless, Elasticsearch, Observability, Security + +//// +/* TODO: Figure out best way to deal with inconsistent location of these capabilities in different solutions. +This content has been removed from the navigation for now because it's not useful in its current state.*/ +//// + +This content applies to: + +The **Visualize Library** is a space where you can save visualization panels that you may want to use across multiple dashboards. The **Visualize Library** consists of two pages: + +* **Visualizations** +* **Annotation groups** + +[discrete] +[[-serverless-visualize-library-visualizations]] +== Visualizations + +By default the **Visualizations** page opens first. Here you can create new visualizations, or select from a list of previously created visualizations. To learn more, refer to https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-visualizations[save to the Visualize Library]. + +[discrete] +[[-serverless-visualize-library-annotation-groups]] +== Annotation groups + +**Annotation groups** give you the option to mark points on a visualization panel with events, such as a deployment, to help track performance. These annotations can be reused across multiple visualization panels. 
diff --git a/serverless/pages/welcome-to-serverless.asciidoc b/serverless/pages/welcome-to-serverless.asciidoc new file mode 100644 index 0000000000..5188c00e75 --- /dev/null +++ b/serverless/pages/welcome-to-serverless.asciidoc @@ -0,0 +1,88 @@ +++++ + +++++ + +preview::[] + +Elastic serverless products allow you to deploy and use Elastic for your use cases without managing the underlying Elastic cluster, +such as nodes, data tiers, and scaling. Serverless instances are fully-managed, autoscaled, and automatically upgraded by Elastic so you can +focus more on gaining value and insight from your data. + +Elastic provides three serverless solutions available on {ecloud}: + +* **{es}**: Build powerful applications and search experiences using a rich ecosystem of vector search capabilities, APIs, and libraries. +* **Elastic {observability}**: Monitor your own platforms and services using powerful machine learning and analytics tools with your logs, metrics, traces, and APM data. +* **Elastic {security}**: Detect, investigate, and respond to threats, with SIEM, endpoint protection, and AI-powered analytics capabilities. + +Serverless instances of the Elastic Stack that you create in {ecloud} are called **serverless projects**. + +Elastic serverless products are currently in preview. https://www.elastic.co/blog/elastic-serverless-architecture[Learn more about serverless in our blog]. + +[discrete] +== Get started + +Choose the type of project that matches your needs and we’ll help you get started with our solution guides. + +:hardbreaks-option: + +[cols="1,1"] +|=== +| +| + +| image:https://www.elastic.co/docs/assets/images/elasticsearch.png[width=150] +a| [.card-title]#Elasticsearch# +Build custom search applications with Elasticsearch. + +<> + +| image:https://www.elastic.co/docs/assets/images/observability.png[width=150] +a| [.card-title]#Observability# +Monitor applications and systems with Elastic Observability. 
+ +<> + +| image:https://www.elastic.co/docs/assets/images/security.png[width=150] +a| [.card-title]#Security# +Detect, investigate, and respond to threats with Elastic Security. + +<> + +| +| +|=== + +[discrete] +== Featured topics + +|=== +| + +a| <> +Invite new members to your organization. + +a| <> +Assign user roles and privileges to members in your organization. + +a| <> +Manage your project data, search power, and more. + +a| <> +View the details about your subscription. + +a| <> +Check past and current usage for your projects. + +a| <> +Manage your indices, data views, and more. +|=== + +:hardbreaks-option!: diff --git a/serverless/partials/deploy-nlp-model-dense-vector.asciidoc b/serverless/partials/deploy-nlp-model-dense-vector.asciidoc new file mode 100644 index 0000000000..e23d9bb54b --- /dev/null +++ b/serverless/partials/deploy-nlp-model-dense-vector.asciidoc @@ -0,0 +1,2 @@ +To deploy a third-party text embedding model, refer to +{ml-docs}/ml-nlp-text-emb-vector-search-example.html#ex-te-vs-deploy[Deploy a text embedding model]. diff --git a/serverless/partials/deploy-nlp-model-elser.asciidoc b/serverless/partials/deploy-nlp-model-elser.asciidoc new file mode 100644 index 0000000000..7a2699dad4 --- /dev/null +++ b/serverless/partials/deploy-nlp-model-elser.asciidoc @@ -0,0 +1,2 @@ +To deploy ELSER, refer to +{ml-docs}/ml-nlp-elser.html#download-deploy-elser[Download and deploy ELSER]. diff --git a/serverless/partials/deploy-nlp-model-widget.asciidoc b/serverless/partials/deploy-nlp-model-widget.asciidoc new file mode 100644 index 0000000000..a3499ea4a9 --- /dev/null +++ b/serverless/partials/deploy-nlp-model-widget.asciidoc @@ -0,0 +1,26 @@ + + +++++ +
+
+ + +
+
+++++ +include::./deploy-nlp-model-elser.asciidoc[] + +++++ +
+ +
+++++ diff --git a/serverless/partials/field-mappings-dense-vector.asciidoc b/serverless/partials/field-mappings-dense-vector.asciidoc new file mode 100644 index 0000000000..3c1c7e5635 --- /dev/null +++ b/serverless/partials/field-mappings-dense-vector.asciidoc @@ -0,0 +1,53 @@ +The models compatible with {es} NLP generate dense vectors as output. The +{ref}/dense-vector.html[`dense_vector`] field type is suitable for storing dense vectors +of numeric values. The index must have a field with the `dense_vector` field +type to index the embeddings that the supported third-party model that you +selected generates. Keep in mind that the model produces embeddings with a +certain number of dimensions. The `dense_vector` field must be configured with +the same number of dimensions using the `dims` option. Refer to the respective +model documentation to get information about the number of dimensions of the +embeddings. + +To review a mapping of an index for an NLP model, refer to the mapping code +snippet in the +{ml-docs}/ml-nlp-text-emb-vector-search-example.html#ex-text-emb-ingest[Add the text embedding model to an ingest inference pipeline] +section of the tutorial. The example shows how to create an index mapping that +defines the `my_embeddings.predicted_value` field - which will contain the model +output - as a `dense_vector` field. + +[source,bash] +---- +curl -X PUT "${ES_URL}/my-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "mappings": { + "properties": { + "my_embeddings.predicted_value": { <1> + "type": "dense_vector", <2> + "dims": 384 <3> + }, + "my_text_field": { <4> + "type": "text" <5> + } + } + } +} +' +---- + +<1> The name of the field that will contain the embeddings generated by the +model. + +<2> The field that contains the embeddings must be a `dense_vector` field. + +<3> The model produces embeddings with a certain number of dimensions. 
The +`dense_vector` field must be configured with the same number of dimensions by +the `dims` option. Refer to the respective model documentation to get +information about the number of dimensions of the embeddings. + +<4> The name of the field from which to create the dense vector representation. +In this example, the name of the field is `my_text_field`. + +<5> The field type is `text` in this example. diff --git a/serverless/partials/field-mappings-elser.asciidoc b/serverless/partials/field-mappings-elser.asciidoc new file mode 100644 index 0000000000..25f7aa5cf6 --- /dev/null +++ b/serverless/partials/field-mappings-elser.asciidoc @@ -0,0 +1,40 @@ +ELSER produces token-weight pairs as output from the input text and the query. +The {es} {ref}/sparse-vector.html[`sparse_vector`] field type can store these +token-weight pairs as numeric feature vectors. The index must have a field with +the `sparse_vector` field type to index the tokens that ELSER generates. + +To create a mapping for your ELSER index, refer to the https://www.elastic.co/docs/current/serverless/elasticsearch/elasticsearch/reference/semantic-search-elser[Create the index mapping section] +of the tutorial. The example +shows how to create an index mapping for `my-index` that defines the +`my_embeddings.tokens` field - which will contain the ELSER output - as a +`sparse_vector` field. + +[source,bash] +---- +curl -X PUT "${ES_URL}/my-index" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "mappings": { + "properties": { + "my_tokens": { <1> + "type": "sparse_vector" <2> + }, + "my_text_field": { <3> + "type": "text" <4> + } + } + } +} +' +---- + +<1> The name of the field that will contain the tokens generated by ELSER. + +<2> The field that contains the tokens must be a `sparse_vector` field. + +<3> The name of the field from which to create the sparse vector representation. +In this example, the name of the field is `my_text_field`. 
+ +<4> The field type is `text` in this example. diff --git a/serverless/partials/field-mappings-widget.asciidoc b/serverless/partials/field-mappings-widget.asciidoc new file mode 100644 index 0000000000..32b987c7ae --- /dev/null +++ b/serverless/partials/field-mappings-widget.asciidoc @@ -0,0 +1,26 @@ + + +++++ +
+
+ + +
+
+++++ +include::./field-mappings-elser.asciidoc[] + +++++ +
+ +
+++++ diff --git a/serverless/partials/generate-embeddings-dense-vector.asciidoc b/serverless/partials/generate-embeddings-dense-vector.asciidoc new file mode 100644 index 0000000000..4eb0129b79 --- /dev/null +++ b/serverless/partials/generate-embeddings-dense-vector.asciidoc @@ -0,0 +1,38 @@ +This is how an ingest pipeline that uses a text embedding model is created: + +[source,bash] +---- +curl -X PUT "${ES_URL}/_ingest/pipeline/my-text-embeddings-pipeline" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "description": "Text embedding pipeline", + "processors": [ + { + "inference": { + "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", <1> + "target_field": "my_embeddings", + "field_map": { <2> + "my_text_field": "text_field" + } + } + } + ] +} +' +---- + +<1> The model ID of the text embedding model you want to use. + +<2> The `field_map` object maps the input document field name (which is +`my_text_field` in this example) to the name of the field that the model expects +(which is always `text_field`). + +To ingest data through the pipeline to generate text embeddings with your chosen +model, refer to the +{ml-docs}/ml-nlp-text-emb-vector-search-example.html#ex-text-emb-ingest[Add the text embedding model to an inference ingest pipeline] +section. The example shows how to create the pipeline with the inference +processor and reindex your data through the pipeline. After you successfully +ingested documents by using the pipeline, your index will contain the text +embeddings generated by the model. 
diff --git a/serverless/partials/generate-embeddings-elser.asciidoc b/serverless/partials/generate-embeddings-elser.asciidoc new file mode 100644 index 0000000000..3badc18360 --- /dev/null +++ b/serverless/partials/generate-embeddings-elser.asciidoc @@ -0,0 +1,31 @@ +This is how an ingest pipeline that uses the ELSER model is created: + +[source,bash] +---- +curl -X PUT "${ES_URL}/_ingest/pipeline/my-text-embeddings-pipeline" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "description": "Text embedding pipeline", + "processors": [ + { + "inference": { + "model_id": ".elser_model_2", + "input_output": [ + { + "input_field": "my_text_field", + "output_field": "my_tokens" + } + ] + } + } + ] +} +' +---- + +To ingest data through the pipeline to generate tokens with ELSER, refer to the +https://www.elastic.co/docs/current/serverless/elasticsearch/elasticsearch/reference/semantic-search-elser[Ingest the data through the {infer} ingest pipeline] section of the tutorial. After you successfully +ingested documents by using the pipeline, your index will contain the tokens +generated by ELSER. diff --git a/serverless/partials/generate-embeddings-widget.asciidoc b/serverless/partials/generate-embeddings-widget.asciidoc new file mode 100644 index 0000000000..26ff216662 --- /dev/null +++ b/serverless/partials/generate-embeddings-widget.asciidoc @@ -0,0 +1,26 @@ + + +++++ +
+
+ + +
+
+++++ +include::./generate-embeddings-elser.asciidoc[] + +++++ +
+ +
+++++ diff --git a/serverless/partials/hybrid-search-dense-vector.asciidoc b/serverless/partials/hybrid-search-dense-vector.asciidoc new file mode 100644 index 0000000000..2047511514 --- /dev/null +++ b/serverless/partials/hybrid-search-dense-vector.asciidoc @@ -0,0 +1,36 @@ +Hybrid search between a semantic and lexical query can be achieved by providing: + +* a `query` clause for the full-text query; +* a `knn` clause with the kNN search that queries the dense vector field; +* and a `rank` clause with the `rrf` parameter to rank documents using +reciprocal rank fusion. + +[source,bash] +---- +curl -X GET "${ES_URL}/my-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "query": { + "match": { + "my_text_field": "the query string" + } + }, + "knn": { + "field": "my_embeddings.predicted_value", + "k": 10, + "num_candidates": 100, + "query_vector_builder": { + "text_embedding": { + "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", + "model_text": "the query string" + } + } + }, + "rank": { + "rrf": {} + } +} +' +---- diff --git a/serverless/partials/hybrid-search-elser.asciidoc b/serverless/partials/hybrid-search-elser.asciidoc new file mode 100644 index 0000000000..e793c79f7b --- /dev/null +++ b/serverless/partials/hybrid-search-elser.asciidoc @@ -0,0 +1,41 @@ +Hybrid search between a semantic and lexical query can be achieved by using retrievers in your search request. +The following example uses retrievers to perform a match query and a sparse vector query, and rank them using RRF. 
+ +[source,bash] +---- +curl -X GET "${ES_URL}/my-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "match": { + "my_text_field": "the query string" + } + } + } + }, + { + "standard": { + "query": { + "sparse_vector": { + "field": "my_tokens", + "inference_id": "my-elser-endpoint", + "query": "the query string" + } + } + } + } + ], + "window_size": 50, + "rank_constant": 20 + } + } +} +' +---- diff --git a/serverless/partials/hybrid-search-widget.asciidoc b/serverless/partials/hybrid-search-widget.asciidoc new file mode 100644 index 0000000000..dfea25dc9b --- /dev/null +++ b/serverless/partials/hybrid-search-widget.asciidoc @@ -0,0 +1,26 @@ + + +++++ +
+
+ + +
+
+++++ +include::./hybrid-search-elser.asciidoc[] + +++++ +
+ +
+++++ diff --git a/serverless/partials/minimum-vcus-detail.asciidoc b/serverless/partials/minimum-vcus-detail.asciidoc new file mode 100644 index 0000000000..0d12fe9e05 --- /dev/null +++ b/serverless/partials/minimum-vcus-detail.asciidoc @@ -0,0 +1,11 @@ +.Minimum runtime VCUs +[IMPORTANT] +==== +When you create an Elasticsearch Serverless project, a minimum number of VCUs are always allocated to your project to maintain basic capabilities. These VCUs are used for the following purposes: + +* **Ingest**: Ensure constant availability for ingesting data into your project (4 VCUs). +* **Search**: Maintain a data cache and support low latency searches (8 VCUs). + +These minimum VCUs are billed at the standard rate per VCU hour, incurring a minimum cost even when you're not actively using your project. +Learn more about https://www.elastic.co/pricing/serverless-search#what-are-the-minimum-compute-resource-vcus-on-elasticsearch-serverless[minimum VCUs on Elasticsearch Serverless]. +==== diff --git a/serverless/partials/search-dense-vector.asciidoc b/serverless/partials/search-dense-vector.asciidoc new file mode 100644 index 0000000000..897af68487 --- /dev/null +++ b/serverless/partials/search-dense-vector.asciidoc @@ -0,0 +1,29 @@ +Text embeddings produced by dense vector models can be queried using a +https://www.elastic.co/docs/current/serverless/elasticsearch/knn-search[kNN search]. +In the `knn` clause, provide the name of the +dense vector field, and a `query_vector_builder` clause with the model ID and +the query text. 
+ +[source,bash] +---- +curl -X GET "${ES_URL}/my-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "knn": { + "field": "my_embeddings.predicted_value", + "k": 10, + "num_candidates": 100, + "query_vector_builder": { + "text_embedding": { + "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", + "model_text": "the query string" + } + } + } +} +' +---- + +// TEST[skip:TBD] diff --git a/serverless/partials/search-elser.asciidoc b/serverless/partials/search-elser.asciidoc new file mode 100644 index 0000000000..c1f2b27dec --- /dev/null +++ b/serverless/partials/search-elser.asciidoc @@ -0,0 +1,23 @@ +ELSER text embeddings can be queried using a +{ref}/query-dsl-sparse-vector-query.html[sparse vector query]. The sparse vector +query enables you to query a sparse vector field, by providing an inference ID, and the query text: + +[source,bash] +---- +curl -X GET "${ES_URL}/my-index/_search" \ +-H "Authorization: ApiKey ${API_KEY}" \ +-H "Content-Type: application/json" \ +-d' +{ + "query":{ + "sparse_vector":{ + "field": "my_tokens", <1> + "inference_id": "my-elser-endpoint", + "query": "the query string" + } + } +} +' +---- + +<1> The field of type `sparse_vector`. diff --git a/serverless/partials/search-widget.asciidoc b/serverless/partials/search-widget.asciidoc new file mode 100644 index 0000000000..ad9ca7e705 --- /dev/null +++ b/serverless/partials/search-widget.asciidoc @@ -0,0 +1,26 @@ + + +++++ +
+
+ + +
+
+++++ +include::./search-elser.asciidoc[] + +++++ +
+ +
+++++ From 8d5f174273c294ec680a35d05be3b1bc930c4402 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Tue, 29 Oct 2024 12:18:27 -0500 Subject: [PATCH 02/25] hard code stack version --- serverless/index.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index 9755620665..121f0e45bd 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -1,6 +1,6 @@ :doctype: book -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/versions/stack/master.asciidoc[] include::{docs-root}/shared/attributes.asciidoc[] :security-serverless: {security-docs-root}/docs/serverless From 523353e9808927df12ab8895e23833f5f041cfbb Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Tue, 29 Oct 2024 13:29:55 -0500 Subject: [PATCH 03/25] try including shared attributes using asciidoc-dir --- serverless/index.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index 121f0e45bd..d9935cc748 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -1,7 +1,7 @@ :doctype: book -include::{docs-root}/shared/versions/stack/master.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] +include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] +include::{asciidoc-dir}/../../shared/attributes.asciidoc[] :security-serverless: {security-docs-root}/docs/serverless :observability-serverless: {observability-docs-root}/docs/en/serverless From 238a45f052030a7e1a2b21464bd4dbceec615d95 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Tue, 29 Oct 2024 17:28:50 -0500 Subject: [PATCH 04/25] clean up landing page, add project-settings, add badges, replace doc related articles --- serverless/images/es-badge.svg | 14 ++++ serverless/images/icons/addDataApp.svg | 1 + serverless/images/icons/addFilter.svg | 1 + serverless/images/icons/ai-assistant-bw.svg | 1 + 
serverless/images/icons/ai-assistant.svg | 1 + serverless/images/icons/apmTrace.svg | 1 + serverless/images/icons/apps.svg | 1 + serverless/images/icons/arrowDown.svg | 1 + serverless/images/icons/arrowLeft.svg | 1 + serverless/images/icons/arrowRight.svg | 1 + serverless/images/icons/beaker.svg | 1 + serverless/images/icons/bell.svg | 1 + serverless/images/icons/boxesHorizontal.svg | 1 + serverless/images/icons/boxesVertical.svg | 1 + serverless/images/icons/calendar.svg | 1 + serverless/images/icons/casesApp.svg | 1 + serverless/images/icons/check.svg | 1 + serverless/images/icons/controlsVertical.svg | 1 + serverless/images/icons/copyClipboard.svg | 1 + serverless/images/icons/cross.svg | 1 + serverless/images/icons/discoverApp.svg | 1 + serverless/images/icons/discuss.svg | 1 + serverless/images/icons/document.svg | 1 + serverless/images/icons/documentation.svg | 1 + serverless/images/icons/editorComment.svg | 1 + serverless/images/icons/error.svg | 1 + serverless/images/icons/expand.svg | 1 + serverless/images/icons/exportAction.svg | 1 + serverless/images/icons/eye.svg | 1 + serverless/images/icons/filter.svg | 1 + serverless/images/icons/filterInCircle.svg | 1 + serverless/images/icons/folderCheck.svg | 1 + serverless/images/icons/gear.svg | 1 + serverless/images/icons/globe.svg | 1 + serverless/images/icons/grabHorizontal.svg | 1 + serverless/images/icons/help.svg | 1 + serverless/images/icons/iInCircle.svg | 1 + serverless/images/icons/image.svg | 1 + serverless/images/icons/importAction.svg | 1 + serverless/images/icons/indexClose.svg | 1 + serverless/images/icons/indexOpen.svg | 1 + serverless/images/icons/inspect.svg | 1 + serverless/images/icons/lensApp.svg | 1 + serverless/images/icons/list.svg | 1 + serverless/images/icons/listAdd.svg | 1 + serverless/images/icons/logoElasticsearch.svg | 1 + serverless/images/icons/logoObservability.svg | 1 + serverless/images/icons/logoSecurity.svg | 1 + serverless/images/icons/menuLeft.svg | 1 + 
serverless/images/icons/menuRight.svg | 1 + serverless/images/icons/merge.svg | 1 + serverless/images/icons/minus.svg | 1 + serverless/images/icons/minusInCircle.svg | 1 + serverless/images/icons/pagesSelect.svg | 1 + serverless/images/icons/pencil.svg | 1 + serverless/images/icons/plusInCircle.svg | 1 + .../images/icons/plusInCircleFilled copy.svg | 1 + .../images/icons/plusInCircleFilled.svg | 1 + serverless/images/icons/popout.svg | 1 + serverless/images/icons/questionInCircle.svg | 1 + serverless/images/icons/refresh.svg | 1 + serverless/images/icons/save.svg | 1 + serverless/images/icons/share.svg | 1 + serverless/images/icons/sortDown.svg | 1 + serverless/images/icons/sortUp.svg | 1 + serverless/images/icons/spaces.svg | 6 ++ serverless/images/icons/starEmpty.svg | 1 + .../images/icons/tableDensityCompact.svg | 1 + serverless/images/icons/timeline.svg | 1 + serverless/images/icons/trash.svg | 1 + serverless/images/icons/visText.svg | 1 + serverless/images/icons/warning.svg | 1 + serverless/images/obs-badge.svg | 14 ++++ serverless/images/sec-badge.svg | 7 ++ serverless/index.asciidoc | 70 ++++++++++++++--- .../devtools/debug-grok-expressions.asciidoc | 2 +- .../devtools/debug-painless-scripts.asciidoc | 2 +- .../devtools/general-developer-tools.asciidoc | 19 ++--- .../profile-queries-and-aggregations.asciidoc | 2 +- .../run-api-requests-in-the-console.asciidoc | 2 +- .../elasticsearch/apis-http-apis.asciidoc | 20 +---- ...alize-your-data-create-dashboards.asciidoc | 2 +- ...e-your-data-create-visualizations.asciidoc | 2 +- .../what-is-elasticsearch-serverless.asciidoc | 71 +++++------------ serverless/pages/general/index.asciidoc | 31 -------- serverless/pages/general/manage-org.asciidoc | 16 +--- .../action-connectors.asciidoc | 4 +- .../pages/project-settings/api-keys.asciidoc | 12 +-- .../project-settings/data-views.asciidoc | 10 +-- .../pages/project-settings/files.asciidoc | 6 +- .../fleet-and-elastic-agent.asciidoc | 4 +- .../index-management.asciidoc | 
12 +-- .../pages/project-settings/index.asciidoc | 29 +++++++ .../ingest-pipelines.asciidoc | 8 +- .../project-settings/integrations.asciidoc | 4 +- .../logstash-pipelines.asciidoc | 6 +- .../machine-learning.asciidoc | 8 +- .../maintenance-windows.asciidoc | 6 +- .../pages/project-settings/maps.asciidoc | 16 ++-- .../project-and-management-settings.asciidoc | 15 ++-- .../project-settings.asciidoc | 78 ++++++++++--------- .../pages/project-settings/reports.asciidoc | 4 +- .../pages/project-settings/rules.asciidoc | 18 ++--- .../project-settings/saved-objects.asciidoc | 14 ++-- .../pages/project-settings/spaces.asciidoc | 20 ++--- .../pages/project-settings/tags.asciidoc | 10 +-- .../project-settings/transforms.asciidoc | 6 +- 107 files changed, 369 insertions(+), 271 deletions(-) create mode 100644 serverless/images/es-badge.svg create mode 100644 serverless/images/icons/addDataApp.svg create mode 100644 serverless/images/icons/addFilter.svg create mode 100644 serverless/images/icons/ai-assistant-bw.svg create mode 100644 serverless/images/icons/ai-assistant.svg create mode 100644 serverless/images/icons/apmTrace.svg create mode 100644 serverless/images/icons/apps.svg create mode 100644 serverless/images/icons/arrowDown.svg create mode 100644 serverless/images/icons/arrowLeft.svg create mode 100644 serverless/images/icons/arrowRight.svg create mode 100644 serverless/images/icons/beaker.svg create mode 100644 serverless/images/icons/bell.svg create mode 100644 serverless/images/icons/boxesHorizontal.svg create mode 100644 serverless/images/icons/boxesVertical.svg create mode 100644 serverless/images/icons/calendar.svg create mode 100644 serverless/images/icons/casesApp.svg create mode 100644 serverless/images/icons/check.svg create mode 100644 serverless/images/icons/controlsVertical.svg create mode 100644 serverless/images/icons/copyClipboard.svg create mode 100644 serverless/images/icons/cross.svg create mode 100644 serverless/images/icons/discoverApp.svg create 
mode 100644 serverless/images/icons/discuss.svg create mode 100644 serverless/images/icons/document.svg create mode 100644 serverless/images/icons/documentation.svg create mode 100644 serverless/images/icons/editorComment.svg create mode 100644 serverless/images/icons/error.svg create mode 100644 serverless/images/icons/expand.svg create mode 100644 serverless/images/icons/exportAction.svg create mode 100644 serverless/images/icons/eye.svg create mode 100644 serverless/images/icons/filter.svg create mode 100644 serverless/images/icons/filterInCircle.svg create mode 100644 serverless/images/icons/folderCheck.svg create mode 100644 serverless/images/icons/gear.svg create mode 100644 serverless/images/icons/globe.svg create mode 100644 serverless/images/icons/grabHorizontal.svg create mode 100644 serverless/images/icons/help.svg create mode 100644 serverless/images/icons/iInCircle.svg create mode 100644 serverless/images/icons/image.svg create mode 100644 serverless/images/icons/importAction.svg create mode 100644 serverless/images/icons/indexClose.svg create mode 100644 serverless/images/icons/indexOpen.svg create mode 100644 serverless/images/icons/inspect.svg create mode 100644 serverless/images/icons/lensApp.svg create mode 100644 serverless/images/icons/list.svg create mode 100644 serverless/images/icons/listAdd.svg create mode 100644 serverless/images/icons/logoElasticsearch.svg create mode 100644 serverless/images/icons/logoObservability.svg create mode 100644 serverless/images/icons/logoSecurity.svg create mode 100644 serverless/images/icons/menuLeft.svg create mode 100644 serverless/images/icons/menuRight.svg create mode 100644 serverless/images/icons/merge.svg create mode 100644 serverless/images/icons/minus.svg create mode 100644 serverless/images/icons/minusInCircle.svg create mode 100644 serverless/images/icons/pagesSelect.svg create mode 100644 serverless/images/icons/pencil.svg create mode 100644 serverless/images/icons/plusInCircle.svg create mode 
100644 serverless/images/icons/plusInCircleFilled copy.svg create mode 100644 serverless/images/icons/plusInCircleFilled.svg create mode 100644 serverless/images/icons/popout.svg create mode 100644 serverless/images/icons/questionInCircle.svg create mode 100644 serverless/images/icons/refresh.svg create mode 100644 serverless/images/icons/save.svg create mode 100644 serverless/images/icons/share.svg create mode 100644 serverless/images/icons/sortDown.svg create mode 100644 serverless/images/icons/sortUp.svg create mode 100644 serverless/images/icons/spaces.svg create mode 100644 serverless/images/icons/starEmpty.svg create mode 100644 serverless/images/icons/tableDensityCompact.svg create mode 100644 serverless/images/icons/timeline.svg create mode 100644 serverless/images/icons/trash.svg create mode 100644 serverless/images/icons/visText.svg create mode 100644 serverless/images/icons/warning.svg create mode 100644 serverless/images/obs-badge.svg create mode 100644 serverless/images/sec-badge.svg delete mode 100644 serverless/pages/general/index.asciidoc create mode 100644 serverless/pages/project-settings/index.asciidoc diff --git a/serverless/images/es-badge.svg b/serverless/images/es-badge.svg new file mode 100644 index 0000000000..8e4fcd839c --- /dev/null +++ b/serverless/images/es-badge.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/serverless/images/icons/addDataApp.svg b/serverless/images/icons/addDataApp.svg new file mode 100644 index 0000000000..124eef8b47 --- /dev/null +++ b/serverless/images/icons/addDataApp.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/addFilter.svg b/serverless/images/icons/addFilter.svg new file mode 100644 index 0000000000..5da375f4ec --- /dev/null +++ b/serverless/images/icons/addFilter.svg @@ -0,0 +1 @@ + diff --git a/serverless/images/icons/ai-assistant-bw.svg b/serverless/images/icons/ai-assistant-bw.svg new file mode 100644 index 0000000000..06896113e4 --- /dev/null +++ 
b/serverless/images/icons/ai-assistant-bw.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/ai-assistant.svg b/serverless/images/icons/ai-assistant.svg new file mode 100644 index 0000000000..ac51eccb68 --- /dev/null +++ b/serverless/images/icons/ai-assistant.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/apmTrace.svg b/serverless/images/icons/apmTrace.svg new file mode 100644 index 0000000000..800b8e51a4 --- /dev/null +++ b/serverless/images/icons/apmTrace.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/apps.svg b/serverless/images/icons/apps.svg new file mode 100644 index 0000000000..ad6f7baf1f --- /dev/null +++ b/serverless/images/icons/apps.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/arrowDown.svg b/serverless/images/icons/arrowDown.svg new file mode 100644 index 0000000000..9022cdedc2 --- /dev/null +++ b/serverless/images/icons/arrowDown.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/arrowLeft.svg b/serverless/images/icons/arrowLeft.svg new file mode 100644 index 0000000000..d5956d01bb --- /dev/null +++ b/serverless/images/icons/arrowLeft.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/arrowRight.svg b/serverless/images/icons/arrowRight.svg new file mode 100644 index 0000000000..b2d76bddc2 --- /dev/null +++ b/serverless/images/icons/arrowRight.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/beaker.svg b/serverless/images/icons/beaker.svg new file mode 100644 index 0000000000..05eb97809c --- /dev/null +++ b/serverless/images/icons/beaker.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/bell.svg b/serverless/images/icons/bell.svg new file mode 100644 index 0000000000..61f2d5493a --- /dev/null +++ b/serverless/images/icons/bell.svg @@ -0,0 +1 @@ + \ No newline at end of 
file diff --git a/serverless/images/icons/boxesHorizontal.svg b/serverless/images/icons/boxesHorizontal.svg new file mode 100644 index 0000000000..d845a6b9db --- /dev/null +++ b/serverless/images/icons/boxesHorizontal.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/boxesVertical.svg b/serverless/images/icons/boxesVertical.svg new file mode 100644 index 0000000000..aed10b0d8e --- /dev/null +++ b/serverless/images/icons/boxesVertical.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/calendar.svg b/serverless/images/icons/calendar.svg new file mode 100644 index 0000000000..ed311de10c --- /dev/null +++ b/serverless/images/icons/calendar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/casesApp.svg b/serverless/images/icons/casesApp.svg new file mode 100644 index 0000000000..bbb2c459a5 --- /dev/null +++ b/serverless/images/icons/casesApp.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/check.svg b/serverless/images/icons/check.svg new file mode 100644 index 0000000000..1145dd301d --- /dev/null +++ b/serverless/images/icons/check.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/controlsVertical.svg b/serverless/images/icons/controlsVertical.svg new file mode 100644 index 0000000000..c7851102ba --- /dev/null +++ b/serverless/images/icons/controlsVertical.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/copyClipboard.svg b/serverless/images/icons/copyClipboard.svg new file mode 100644 index 0000000000..d21fa159a5 --- /dev/null +++ b/serverless/images/icons/copyClipboard.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/cross.svg b/serverless/images/icons/cross.svg new file mode 100644 index 0000000000..82df3e03d3 --- /dev/null +++ b/serverless/images/icons/cross.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/serverless/images/icons/discoverApp.svg b/serverless/images/icons/discoverApp.svg new file mode 100644 index 0000000000..33041ae01c --- /dev/null +++ b/serverless/images/icons/discoverApp.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/discuss.svg b/serverless/images/icons/discuss.svg new file mode 100644 index 0000000000..f6345463ba --- /dev/null +++ b/serverless/images/icons/discuss.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/document.svg b/serverless/images/icons/document.svg new file mode 100644 index 0000000000..570b21dcb0 --- /dev/null +++ b/serverless/images/icons/document.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/documentation.svg b/serverless/images/icons/documentation.svg new file mode 100644 index 0000000000..b519c72f03 --- /dev/null +++ b/serverless/images/icons/documentation.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/editorComment.svg b/serverless/images/icons/editorComment.svg new file mode 100644 index 0000000000..3cf20f691d --- /dev/null +++ b/serverless/images/icons/editorComment.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/error.svg b/serverless/images/icons/error.svg new file mode 100644 index 0000000000..963c833e42 --- /dev/null +++ b/serverless/images/icons/error.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/expand.svg b/serverless/images/icons/expand.svg new file mode 100644 index 0000000000..66b1327a34 --- /dev/null +++ b/serverless/images/icons/expand.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/exportAction.svg b/serverless/images/icons/exportAction.svg new file mode 100644 index 0000000000..53a87fda0e --- /dev/null +++ b/serverless/images/icons/exportAction.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/eye.svg 
b/serverless/images/icons/eye.svg new file mode 100644 index 0000000000..0e576f21d5 --- /dev/null +++ b/serverless/images/icons/eye.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/filter.svg b/serverless/images/icons/filter.svg new file mode 100644 index 0000000000..efebf84ede --- /dev/null +++ b/serverless/images/icons/filter.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/filterInCircle.svg b/serverless/images/icons/filterInCircle.svg new file mode 100644 index 0000000000..ef3b61fd8f --- /dev/null +++ b/serverless/images/icons/filterInCircle.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/folderCheck.svg b/serverless/images/icons/folderCheck.svg new file mode 100644 index 0000000000..f1ac8dbcef --- /dev/null +++ b/serverless/images/icons/folderCheck.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/gear.svg b/serverless/images/icons/gear.svg new file mode 100644 index 0000000000..51d6cb021b --- /dev/null +++ b/serverless/images/icons/gear.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/globe.svg b/serverless/images/icons/globe.svg new file mode 100644 index 0000000000..ee2739ea6f --- /dev/null +++ b/serverless/images/icons/globe.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/grabHorizontal.svg b/serverless/images/icons/grabHorizontal.svg new file mode 100644 index 0000000000..7e824b4607 --- /dev/null +++ b/serverless/images/icons/grabHorizontal.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/help.svg b/serverless/images/icons/help.svg new file mode 100644 index 0000000000..ad01598e47 --- /dev/null +++ b/serverless/images/icons/help.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/iInCircle.svg b/serverless/images/icons/iInCircle.svg new file mode 100644 index 
0000000000..33429341f3 --- /dev/null +++ b/serverless/images/icons/iInCircle.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/image.svg b/serverless/images/icons/image.svg new file mode 100644 index 0000000000..6f215f91b4 --- /dev/null +++ b/serverless/images/icons/image.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/importAction.svg b/serverless/images/icons/importAction.svg new file mode 100644 index 0000000000..341653e6b1 --- /dev/null +++ b/serverless/images/icons/importAction.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/indexClose.svg b/serverless/images/icons/indexClose.svg new file mode 100644 index 0000000000..cb4d8f2cd2 --- /dev/null +++ b/serverless/images/icons/indexClose.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/indexOpen.svg b/serverless/images/icons/indexOpen.svg new file mode 100644 index 0000000000..09e9d7284b --- /dev/null +++ b/serverless/images/icons/indexOpen.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/inspect.svg b/serverless/images/icons/inspect.svg new file mode 100644 index 0000000000..43374b4aa4 --- /dev/null +++ b/serverless/images/icons/inspect.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/lensApp.svg b/serverless/images/icons/lensApp.svg new file mode 100644 index 0000000000..59af79275b --- /dev/null +++ b/serverless/images/icons/lensApp.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/list.svg b/serverless/images/icons/list.svg new file mode 100644 index 0000000000..52e8e7acd1 --- /dev/null +++ b/serverless/images/icons/list.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/listAdd.svg b/serverless/images/icons/listAdd.svg new file mode 100644 index 0000000000..b59e25bcc4 --- /dev/null +++ b/serverless/images/icons/listAdd.svg @@ -0,0 
+1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/logoElasticsearch.svg b/serverless/images/icons/logoElasticsearch.svg new file mode 100644 index 0000000000..7fcb806028 --- /dev/null +++ b/serverless/images/icons/logoElasticsearch.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/logoObservability.svg b/serverless/images/icons/logoObservability.svg new file mode 100644 index 0000000000..ce14d37b0b --- /dev/null +++ b/serverless/images/icons/logoObservability.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/logoSecurity.svg b/serverless/images/icons/logoSecurity.svg new file mode 100644 index 0000000000..1558692001 --- /dev/null +++ b/serverless/images/icons/logoSecurity.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/menuLeft.svg b/serverless/images/icons/menuLeft.svg new file mode 100644 index 0000000000..40d511e5af --- /dev/null +++ b/serverless/images/icons/menuLeft.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/menuRight.svg b/serverless/images/icons/menuRight.svg new file mode 100644 index 0000000000..1a772128d7 --- /dev/null +++ b/serverless/images/icons/menuRight.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/merge.svg b/serverless/images/icons/merge.svg new file mode 100644 index 0000000000..478d340599 --- /dev/null +++ b/serverless/images/icons/merge.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/minus.svg b/serverless/images/icons/minus.svg new file mode 100644 index 0000000000..763922a916 --- /dev/null +++ b/serverless/images/icons/minus.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/minusInCircle.svg b/serverless/images/icons/minusInCircle.svg new file mode 100644 index 0000000000..3851640918 --- /dev/null +++ b/serverless/images/icons/minusInCircle.svg @@ -0,0 +1 @@ + \ No 
newline at end of file diff --git a/serverless/images/icons/pagesSelect.svg b/serverless/images/icons/pagesSelect.svg new file mode 100644 index 0000000000..6238881210 --- /dev/null +++ b/serverless/images/icons/pagesSelect.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/pencil.svg b/serverless/images/icons/pencil.svg new file mode 100644 index 0000000000..cb16b5d2f0 --- /dev/null +++ b/serverless/images/icons/pencil.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/plusInCircle.svg b/serverless/images/icons/plusInCircle.svg new file mode 100644 index 0000000000..2a655e0396 --- /dev/null +++ b/serverless/images/icons/plusInCircle.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/plusInCircleFilled copy.svg b/serverless/images/icons/plusInCircleFilled copy.svg new file mode 100644 index 0000000000..c2052e4c5f --- /dev/null +++ b/serverless/images/icons/plusInCircleFilled copy.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/plusInCircleFilled.svg b/serverless/images/icons/plusInCircleFilled.svg new file mode 100644 index 0000000000..c2052e4c5f --- /dev/null +++ b/serverless/images/icons/plusInCircleFilled.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/popout.svg b/serverless/images/icons/popout.svg new file mode 100644 index 0000000000..875bf6662d --- /dev/null +++ b/serverless/images/icons/popout.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/questionInCircle.svg b/serverless/images/icons/questionInCircle.svg new file mode 100644 index 0000000000..b715f289ad --- /dev/null +++ b/serverless/images/icons/questionInCircle.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/refresh.svg b/serverless/images/icons/refresh.svg new file mode 100644 index 0000000000..58662be4af --- /dev/null +++ 
b/serverless/images/icons/refresh.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/save.svg b/serverless/images/icons/save.svg new file mode 100644 index 0000000000..d84f9df2d0 --- /dev/null +++ b/serverless/images/icons/save.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/share.svg b/serverless/images/icons/share.svg new file mode 100644 index 0000000000..c4b52ea594 --- /dev/null +++ b/serverless/images/icons/share.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/sortDown.svg b/serverless/images/icons/sortDown.svg new file mode 100644 index 0000000000..7efa30e917 --- /dev/null +++ b/serverless/images/icons/sortDown.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/sortUp.svg b/serverless/images/icons/sortUp.svg new file mode 100644 index 0000000000..c5d0f004ad --- /dev/null +++ b/serverless/images/icons/sortUp.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/spaces.svg b/serverless/images/icons/spaces.svg new file mode 100644 index 0000000000..745922858b --- /dev/null +++ b/serverless/images/icons/spaces.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/serverless/images/icons/starEmpty.svg b/serverless/images/icons/starEmpty.svg new file mode 100644 index 0000000000..177c197697 --- /dev/null +++ b/serverless/images/icons/starEmpty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/tableDensityCompact.svg b/serverless/images/icons/tableDensityCompact.svg new file mode 100644 index 0000000000..9eabb5fe83 --- /dev/null +++ b/serverless/images/icons/tableDensityCompact.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/timeline.svg b/serverless/images/icons/timeline.svg new file mode 100644 index 0000000000..12257170f2 --- /dev/null +++ b/serverless/images/icons/timeline.svg @@ -0,0 +1 @@ + \ No 
newline at end of file diff --git a/serverless/images/icons/trash.svg b/serverless/images/icons/trash.svg new file mode 100644 index 0000000000..5f3d6de047 --- /dev/null +++ b/serverless/images/icons/trash.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/visText.svg b/serverless/images/icons/visText.svg new file mode 100644 index 0000000000..9c6c8c9e0d --- /dev/null +++ b/serverless/images/icons/visText.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/icons/warning.svg b/serverless/images/icons/warning.svg new file mode 100644 index 0000000000..642f726c74 --- /dev/null +++ b/serverless/images/icons/warning.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/serverless/images/obs-badge.svg b/serverless/images/obs-badge.svg new file mode 100644 index 0000000000..436ea65bcd --- /dev/null +++ b/serverless/images/obs-badge.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/serverless/images/sec-badge.svg b/serverless/images/sec-badge.svg new file mode 100644 index 0000000000..2a2da118f1 --- /dev/null +++ b/serverless/images/sec-badge.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index d9935cc748..0f6b391289 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -3,11 +3,16 @@ include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] include::{asciidoc-dir}/../../shared/attributes.asciidoc[] -:security-serverless: {security-docs-root}/docs/serverless -:observability-serverless: {observability-docs-root}/docs/en/serverless -:elasticsearch-serverless: {docs-content-root}/serverless/pages/elasticsearch -:general-serverless: {docs-content-root}/serverless/pages/general -:devtools-serverless: {docs-content-root}/serverless/pages/devtools +:security-serverless: {security-docs-root}/docs/serverless +:observability-serverless: {observability-docs-root}/docs/en/serverless +:elasticsearch-serverless: 
{docs-content-root}/serverless/pages/elasticsearch +:general-serverless: {docs-content-root}/serverless/pages/general +:devtools-serverless: {docs-content-root}/serverless/pages/devtools +:project-settings-serverless: {docs-content-root}/serverless/pages/project-settings + +:es-badge: <> +:obs-badge: <> +:sec-badge: <> = Serverless @@ -42,7 +47,11 @@ include::{general-serverless}/user-profile.asciidoc[leveloffset=+2] include::{general-serverless}/cloud-regions.asciidoc[leveloffset=+2] [[what-is-elasticsearch-serverless]] -== Elasticsearch +== Elasticsearch serverless + +++++ +Elasticsearch +++++ include::{elasticsearch-serverless}/what-is-elasticsearch-serverless.asciidoc[leveloffset=+2] @@ -92,9 +101,13 @@ include::{elasticsearch-serverless}/serverless-differences.asciidoc[leveloffset= include::{elasticsearch-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] [[what-is-observability-serverless]] -== Elastic Observability +== Elastic Observability serverless -Hello world +++++ +Elastic Observability +++++ + +include::{observability-serverless}/what-is-observability-serverless.asciidoc[leveloffset=+2] include::{observability-serverless}/observability-overview.asciidoc[leveloffset=+2] @@ -243,9 +256,13 @@ include::{observability-serverless}/elastic-entity-model.asciidoc[leveloffset=+2 include::{observability-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] [[what-is-security-serverless]] -== Elastic Security +== Elastic Security serverless + +++++ +Elastic Security +++++ -Hello world +include::{security-serverless}/what-is-security-serverless.asciidoc[leveloffset=+2] include::{security-serverless}/security-overview.asciidoc[leveloffset=+2] @@ -430,9 +447,10 @@ include::{security-serverless}/troubleshooting/troubleshoot-endpoints.asciidoc[l include::{security-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] +[[developer-tools]] == Dev tools -Hello world 
+include::{devtools-serverless}/general-developer-tools.asciidoc[leveloffset=+2] include::{devtools-serverless}/run-api-requests-in-the-console.asciidoc[leveloffset=+2] @@ -444,6 +462,34 @@ include::{devtools-serverless}/debug-painless-scripts.asciidoc[leveloffset=+2] include::{devtools-serverless}/developer-tools-troubleshooting.asciidoc[leveloffset=+2] +[[project-and-management-settings]] == Project and management settings -Hello world \ No newline at end of file +include::{project-settings-serverless}/project-and-management-settings.asciidoc[leveloffset=+2] + +include::{project-settings-serverless}/project-settings.asciidoc[leveloffset=+2] +include::{project-settings-serverless}/api-keys.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/action-connectors.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/custom-roles.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/data-views.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/files.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/index-management.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/ingest-pipelines.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/logstash-pipelines.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/machine-learning.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/maintenance-windows.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/maps.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/reports.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/rules.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/saved-objects.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/spaces.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/tags.asciidoc[leveloffset=+3] +include::{project-settings-serverless}/transforms.asciidoc[leveloffset=+3] + 
+include::{project-settings-serverless}/integrations.asciidoc[leveloffset=+2] + +include::{project-settings-serverless}/fleet-and-elastic-agent.asciidoc[leveloffset=+2] + +// Hidden pages +include::{elasticsearch-serverless}/explore-your-data-visualize-your-data-create-dashboards.asciidoc[leveloffset=+1] +include::{elasticsearch-serverless}/explore-your-data-visualize-your-data-create-visualizations.asciidoc[leveloffset=+1] \ No newline at end of file diff --git a/serverless/pages/devtools/debug-grok-expressions.asciidoc b/serverless/pages/devtools/debug-grok-expressions.asciidoc index 9c63a6f20c..d65939e661 100644 --- a/serverless/pages/devtools/debug-grok-expressions.asciidoc +++ b/serverless/pages/devtools/debug-grok-expressions.asciidoc @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} You can build and debug grok patterns in the **Grok Debugger** before you use them in your data processing pipelines. Grok is a pattern-matching syntax that you can use to parse and structure arbitrary text. diff --git a/serverless/pages/devtools/debug-painless-scripts.asciidoc b/serverless/pages/devtools/debug-painless-scripts.asciidoc index 78a5d601c0..a832b84345 100644 --- a/serverless/pages/devtools/debug-painless-scripts.asciidoc +++ b/serverless/pages/devtools/debug-painless-scripts.asciidoc @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {obs-badge} {sec-badge} beta::[] diff --git a/serverless/pages/devtools/general-developer-tools.asciidoc b/serverless/pages/devtools/general-developer-tools.asciidoc index 04bc980852..ac24b5aa11 100644 --- a/serverless/pages/devtools/general-developer-tools.asciidoc +++ b/serverless/pages/devtools/general-developer-tools.asciidoc @@ -1,6 +1,3 @@ -[[-serverless-devtools-developer-tools]] -= Developer tools - :description: Use our developer tools to interact with your data. 
:keywords: serverless, dev tools, overview @@ -9,19 +6,19 @@ preview:[] |=== | Feature | Description | Available in -| https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[Console] +| <> | Interact with Elastic REST APIs. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/devtools/profile-queries-and-aggregations[{searchprofiler}] +| <> | Inspect and analyze your search queries. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/devtools/debug-grok-expressions[Grok Debugger] +| <> | Build and debug grok patterns before you use them in your data processing pipelines. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/devtools/debug-painless-scripts[Painless Lab] +| <> | Use an interactive code editor to test and debug Painless scripts in real time. -| +| {obs-badge}{sec-badge} |=== diff --git a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc index c3f447308a..23d1ea9e75 100644 --- a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc +++ b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} //// /* TODO: The following content was copied verbatim from the ES docs on Oct 5, 2023. 
It should be included through diff --git a/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc b/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc index 65f9522553..f43964a2d6 100644 --- a/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc +++ b/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} **Console** lets you interact with https://www.elastic.co/docs/api[Elasticsearch and Kibana serverless APIs] from your project. diff --git a/serverless/pages/elasticsearch/apis-http-apis.asciidoc b/serverless/pages/elasticsearch/apis-http-apis.asciidoc index d619057cf0..f2ebbe4695 100644 --- a/serverless/pages/elasticsearch/apis-http-apis.asciidoc +++ b/serverless/pages/elasticsearch/apis-http-apis.asciidoc @@ -6,20 +6,6 @@ preview:[] - +* <> +* <> +* https://www.elastic.co/docs/api/[API Reference]: Explore the reference information for Elastic Serverless REST APIs diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc index 7ca035d302..b2b8dcdafb 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data-dashboards]] +[role="exclude",id="explore-your-data-dashboards"] = Create dashboards :description: Create dashboards to visualize and monitor your {es} data. 
diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc index 1879230f88..cc0f7683c6 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data-visualizations]] +[role="exclude",id="explore-your-data-visualizations"] = Create visualizations :description: Create charts, graphs, maps, and more from your {es} data. diff --git a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc index a950da8126..a5087641bc 100644 --- a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc +++ b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc @@ -1,9 +1,13 @@ +//// +To be rewritten/refined +//// + :description: Build search solutions and applications with {es}. :keywords: serverless, elasticsearch, overview preview:[] - +Elasticsearch allows you to build custom applications. Whether you have structured or unstructured text, numerical data, or geospatial data, Elasticsearch can efficiently store and index it in a way that supports fast searches. .Understanding Elasticsearch on serverless [IMPORTANT] @@ -11,53 +15,18 @@ preview:[] Refer to <> and <> for important details, including features and limitations specific to {es} on serverless. ==== - - - +[discrete] +== Get started + +* <>: Create your first Elasticsearch project. +* <>: Learn how to get your data into Elasticsearch. + +[discrete] +== How to + +* <>: Build your queries to perform and combine many types of searches. +* <>: Search, filter your data, and display your findings. 
+* <>: Create rules to detect complex conditions and trigger alerts. +* <>: Send requests with Console and profile queries with Search Profiler. +* <>: Manage user access, billing, and check performance metrics. + diff --git a/serverless/pages/general/index.asciidoc b/serverless/pages/general/index.asciidoc deleted file mode 100644 index 4036873e17..0000000000 --- a/serverless/pages/general/index.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -= Welcome to Elastic serverless - -include::./what-is-serverless.asciidoc[leveloffset=+1] - -include::./sign-up.asciidoc[leveloffset=+1] - -include::./manage-org.asciidoc[leveloffset=+1] -include::./manage-access-to-org.asciidoc[leveloffset=+2] -include::./manage-access-to-org-user-roles.asciidoc[leveloffset=+2] -include::./manage-access-to-org-from-existing-account.asciidoc[leveloffset=+2] - -include::undefined[leveloffset=+1] -include::./manage-your-project-rest-api.asciidoc[leveloffset=+2] - -include::./manage-billing.asciidoc[leveloffset=+1] -include::./manage-billing-check-subscription.asciidoc[leveloffset=+2] -include::./manage-billing-monitor-usage.asciidoc[leveloffset=+2] -include::./manage-billing-history.asciidoc[leveloffset=+2] -include::./manage-billing-pricing-model.asciidoc[leveloffset=+2] -include::./manage-billing-stop-project.asciidoc[leveloffset=+2] - -include::./service-status.asciidoc[leveloffset=+1] - -include::./user-profile.asciidoc[leveloffset=+1] - -include::undefined[leveloffset=+1] diff --git a/serverless/pages/general/manage-org.asciidoc b/serverless/pages/general/manage-org.asciidoc index 45e922b227..9ef5be02b2 100644 --- a/serverless/pages/general/manage-org.asciidoc +++ b/serverless/pages/general/manage-org.asciidoc @@ -10,16 +10,6 @@ When you sign up to Elastic Cloud, you create an **organization**. 
This organization is the umbrella for all of your Elastic Cloud resources, users, and account settings. Every organization has a unique identifier. Bills are invoiced according to the billing contact and details that you set for your organization. - +* <> +* <> +* <> diff --git a/serverless/pages/project-settings/action-connectors.asciidoc b/serverless/pages/project-settings/action-connectors.asciidoc index d122d32f6c..9bd1eb08fa 100644 --- a/serverless/pages/project-settings/action-connectors.asciidoc +++ b/serverless/pages/project-settings/action-connectors.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-action-connectors]] +[[action-connectors]] = {connectors-app} :description: Configure connections to third party systems for use in cases and rules. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} The list of available connectors varies by project type. diff --git a/serverless/pages/project-settings/api-keys.asciidoc b/serverless/pages/project-settings/api-keys.asciidoc index e6f27cee95..f181ffc222 100644 --- a/serverless/pages/project-settings/api-keys.asciidoc +++ b/serverless/pages/project-settings/api-keys.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-api-keys]] +[[api-keys]] = {api-keys-app} :description: API keys allow access to the {stack} on behalf of a user. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} API keys are security mechanisms used to authenticate and authorize access to {stack} resources, and ensure that only authorized users or applications are able to interact with the {stack}. @@ -38,7 +38,7 @@ cluster privileges to use API keys in Elastic. API keys can also be seen in a r //// [discrete] -[[-serverless-api-keys-create-an-api-key]] +[[api-keys-create-an-api-key]] == Create an API key In **{api-keys-app}**, click **Create API key**: @@ -61,7 +61,7 @@ authenticate access using a web browser. 
==== [discrete] -[[-serverless-api-keys-restrict-privileges]] +[[api-keys-restrict-privileges]] === Restrict privileges When you create or update an API key, use **Restrict privileges** to limit the permissions. Define the permissions using a JSON `role_descriptors` object, where you specify one or more roles and the associated privileges. @@ -92,7 +92,7 @@ For example, the following `role_descriptors` object defines a `books-read-only` For the `role_descriptors` object schema, check out the {ref}/security-api-create-api-key.html#security-api-create-api-key-request-body[`/_security/api_key` endpoint] docs. For supported privileges, check {ref}/security-privileges.html#privileges-list-indices[Security privileges]. [discrete] -[[-serverless-api-keys-update-an-api-key]] +[[api-keys-update-an-api-key]] == Update an API key In **{api-keys-app}**, click on the name of the key. @@ -101,7 +101,7 @@ You can update only **Restrict privileges** and **Include metadata**. // TBD: Refer to the update API key documentation to learn more about updating personal API keys. [discrete] -[[-serverless-api-keys-view-and-delete-api-keys]] +[[api-keys-view-and-delete-api-keys]] == View and delete API keys The **{api-keys-app}** app lists your API keys, including the name, date created, and status. diff --git a/serverless/pages/project-settings/data-views.asciidoc b/serverless/pages/project-settings/data-views.asciidoc index 204d69a72d..ae51d6c304 100644 --- a/serverless/pages/project-settings/data-views.asciidoc +++ b/serverless/pages/project-settings/data-views.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-data-views]] +[[data-views]] = {data-sources-cap} :description: Elastic requires a {data-source} to access the {es} data that you want to explore. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} A {data-source} can point to one or more indices, {ref}/data-streams.html[data streams], or {ref}/alias.html[index aliases]. 
For example, a {data-source} can point to your log data from yesterday or all indices that contain your data. @@ -30,7 +30,7 @@ For example, a {data-source} can point to your log data from yesterday or all in //// [discrete] -[[-serverless-data-views-create-a-data-view]] +[[data-views-create-a-data-view]] == Create a data view After you've loaded your data, follow these steps to create a {data-source}: @@ -74,7 +74,7 @@ based on different timestamps. You can manage your data views in **{project-settings} → {manage-app} → {data-views-app}**. [discrete] -[[-serverless-data-views-create-a-temporary-data-source]] +[[data-views-create-a-temporary-data-source]] === Create a temporary {data-source} Want to explore your data or create a visualization without saving it as a data view? @@ -160,7 +160,7 @@ aggregations using that {data-source} in Elastic take advantage of {ccs}. */ //// [discrete] -[[-serverless-data-views-delete-a-data-source]] +[[data-views-delete-a-data-source]] == Delete a {data-source} When you delete a {data-source}, you cannot recover the associated field formatters, runtime fields, source filters, diff --git a/serverless/pages/project-settings/files.asciidoc b/serverless/pages/project-settings/files.asciidoc index 36ea44ea5d..b29d590e00 100644 --- a/serverless/pages/project-settings/files.asciidoc +++ b/serverless/pages/project-settings/files.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-files]] +[[files]] = {files-app} :description: Manage files that are stored in Elastic. @@ -6,9 +6,9 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} -Several {serverless-full} features let you upload files. For example, you can add files to https://www.elastic.co/docs/current/serverless/observability/cases[cases] or upload a logo to an **Image** panel in a https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-dashboards[dashboard]. +Several {serverless-full} features let you upload files. 
For example, you can add files to <> or upload a logo to an **Image** panel in a <>. You can access these uploaded files in **{project-settings} → {manage-app} → {files-app}**. diff --git a/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc b/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc index 111c1e3ac6..f5162543af 100644 --- a/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc +++ b/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-fleet-and-elastic-agent]] +[[fleet-and-elastic-agent]] = Fleet and Elastic Agent :description: Centrally manage your Elastic Agents in Fleet @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {obs-badge} {sec-badge} {agent} is a single, unified way to add monitoring for logs, metrics, and other types of data to a host. It can also protect hosts from security threats, query data from operating systems, forward data from remote services or hardware, and more. diff --git a/serverless/pages/project-settings/index-management.asciidoc b/serverless/pages/project-settings/index-management.asciidoc index 67ca8f5e03..bb19e30b5f 100644 --- a/serverless/pages/project-settings/index-management.asciidoc +++ b/serverless/pages/project-settings/index-management.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-index-management]] +[[index-management]] = Index management :description: Perform CRUD operations on indices and data streams. View index settings, mappings, and statistics. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} Elastic's index management features are an easy, convenient way to manage your cluster's indices, data streams, index templates, and enrich policies. Practicing good index management ensures your data is stored correctly and in the most cost-effective way possible. @@ -46,7 +46,7 @@ To add these privileges, go to **Management → Custom Roles**. 
//// [discrete] -[[-serverless-index-management-manage-indices]] +[[index-management-manage-indices]] == Manage indices Go to **{project-settings} → {manage-app} → {index-manage-app}**: @@ -77,7 +77,7 @@ on multiple indices, select their checkboxes and then open the **Manage** menu. // TO-DO: This screenshot needs to be refreshed since it doesn't show the appropriate context [discrete] -[[-serverless-index-management-manage-data-streams]] +[[index-management-manage-data-streams]] == Manage data streams Investigate your data streams and address lifecycle management needs in the **Data Streams** view. @@ -107,7 +107,7 @@ TO-DO: This screenshot is not accurate since it contains several toggles that do //// [discrete] -[[-serverless-index-management-manage-index-templates]] +[[index-management-manage-index-templates]] == Manage index templates Create, edit, clone, and delete your index templates in the **Index Templates** view. Changes made to an index template do not affect existing indices. @@ -253,7 +253,7 @@ TO-DO:This page is missing information about the "Component templates" tab. //// [discrete] -[[-serverless-index-management-manage-enrich-policies]] +[[index-management-manage-enrich-policies]] == Manage enrich policies Use the **Enrich Policies** view to add data from your existing indices to incoming documents during ingest. 
diff --git a/serverless/pages/project-settings/index.asciidoc b/serverless/pages/project-settings/index.asciidoc new file mode 100644 index 0000000000..7f6ffb158b --- /dev/null +++ b/serverless/pages/project-settings/index.asciidoc @@ -0,0 +1,29 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + += Project and management settings + +include::./project-settings.asciidoc[leveloffset=+1] +include::./api-keys.asciidoc[leveloffset=+2] +include::./action-connectors.asciidoc[leveloffset=+2] +include::./custom-roles.asciidoc[leveloffset=+2] +include::./data-views.asciidoc[leveloffset=+2] +include::./files.asciidoc[leveloffset=+2] +include::./index-management.asciidoc[leveloffset=+2] +include::./ingest-pipelines.asciidoc[leveloffset=+2] +include::./logstash-pipelines.asciidoc[leveloffset=+2] +include::./machine-learning.asciidoc[leveloffset=+2] +include::./maintenance-windows.asciidoc[leveloffset=+2] +include::./maps.asciidoc[leveloffset=+2] +include::./reports.asciidoc[leveloffset=+2] +include::./rules.asciidoc[leveloffset=+2] +include::./saved-objects.asciidoc[leveloffset=+2] +include::./spaces.asciidoc[leveloffset=+2] +include::./tags.asciidoc[leveloffset=+2] +include::./transforms.asciidoc[leveloffset=+2] + +include::./integrations.asciidoc[leveloffset=+1] + +include::./fleet-and-elastic-agent.asciidoc[leveloffset=+1] diff --git a/serverless/pages/project-settings/ingest-pipelines.asciidoc b/serverless/pages/project-settings/ingest-pipelines.asciidoc index 0129961e9f..2f49829487 100644 --- a/serverless/pages/project-settings/ingest-pipelines.asciidoc +++ b/serverless/pages/project-settings/ingest-pipelines.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-ingest-pipelines]] +[[ingest-pipelines]] = {ingest-pipelines-cap} :description: Create and manage {ingest-pipelines} to perform common transformations and enrichments on your data. 
@@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} {ref}/ingest.html[{ingest-pipelines-cap}] let you perform common transformations on your data before indexing. For example, you can use pipelines to remove fields, extract values from text, and enrich your data. @@ -27,7 +27,7 @@ TBD: Do these requirements apply in serverless? //// [discrete] -[[-serverless-ingest-pipelines-create-and-manage-pipelines]] +[[ingest-pipelines-create-and-manage-pipelines]] == Create and manage pipelines In **{project-settings} → {manage-app} → {ingest-pipelines-app}**, you can: @@ -47,7 +47,7 @@ Mapping your custom data to ECS makes the data easier to search and lets you reu To get started, check {ecs-ref}/ecs-converting.html[Map custom data to ECS]. [discrete] -[[-serverless-ingest-pipelines-test-pipelines]] +[[ingest-pipelines-test-pipelines]] == Test pipelines Before you use a pipeline in production, you should test it using sample documents. diff --git a/serverless/pages/project-settings/integrations.asciidoc b/serverless/pages/project-settings/integrations.asciidoc index b8b061f31f..85a39014ab 100644 --- a/serverless/pages/project-settings/integrations.asciidoc +++ b/serverless/pages/project-settings/integrations.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-integrations]] +[[integrations]] = Integrations :description: Use our pre-built integrations to connect your data to Elastic. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {obs-badge} {sec-badge} Elastic integrations are a streamlined way to connect your data to Elastic. 
Integrations are available for popular services and platforms, like Nginx, AWS, and MongoDB, diff --git a/serverless/pages/project-settings/logstash-pipelines.asciidoc b/serverless/pages/project-settings/logstash-pipelines.asciidoc index bc22e44b02..00a0db7e2f 100644 --- a/serverless/pages/project-settings/logstash-pipelines.asciidoc +++ b/serverless/pages/project-settings/logstash-pipelines.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-logstash-pipelines]] +[[logstash-pipelines]] = {ls-pipelines} :description: Create, edit, and delete your {ls} pipeline configurations. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} In **{project-settings} → {manage-app} → {ls-pipelines-app}**, you can control multiple {ls} instances and pipeline configurations. @@ -22,7 +22,7 @@ The `pipelines.yml` file and settings such as `path.config` and `config.string` ==== [discrete] -[[-serverless-logstash-pipelines-manage-pipelines]] +[[logstash-pipelines-manage-pipelines]] == Manage pipelines //// diff --git a/serverless/pages/project-settings/machine-learning.asciidoc b/serverless/pages/project-settings/machine-learning.asciidoc index d51fbe9dfc..aa16218ad8 100644 --- a/serverless/pages/project-settings/machine-learning.asciidoc +++ b/serverless/pages/project-settings/machine-learning.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-machine-learning]] +[[machine-learning]] = {ml-cap} :description: View, export, and import {ml} jobs and models. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} To view your {ml} resources, go to **{project-settings} → {manage-app} → {ml-app}**: @@ -24,14 +24,14 @@ The {ml-features} that are available vary by project type: For more information, go to {ml-docs}/ml-ad-overview.html[{anomaly-detect-cap}], {ml-docs}/ml-dfanalytics.html[{dfanalytics-cap}] and {ml-docs}/ml-nlp.html[Natural language processing]. 
[discrete] -[[-serverless-machine-learning-synchronize-saved-objects]] +[[machine-learning-synchronize-saved-objects]] == Synchronize saved objects Before you can view your {ml} {dfeeds}, jobs, and trained models in {kib}, they must have saved objects. For example, if you used APIs to create your jobs, wait for automatic synchronization or go to the **{ml-app}** page and click **Synchronize saved objects**. [discrete] -[[-serverless-machine-learning-export-and-import-jobs]] +[[machine-learning-export-and-import-jobs]] == Export and import jobs You can export and import your {ml} job and {dfeed} configuration details on the **{ml-app}** page. diff --git a/serverless/pages/project-settings/maintenance-windows.asciidoc b/serverless/pages/project-settings/maintenance-windows.asciidoc index cc9cf91d0a..aa04c551cf 100644 --- a/serverless/pages/project-settings/maintenance-windows.asciidoc +++ b/serverless/pages/project-settings/maintenance-windows.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-maintenance-windows]] +[[maintenance-windows]] = {maint-windows-cap} :description: Suppress rule notifications for scheduled periods of time. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {obs-badge} {sec-badge} preview::[] @@ -34,7 +34,7 @@ For more details, refer to {kib} privileg //// [discrete] -[[-serverless-maintenance-windows-create-and-manage-maint-windows]] +[[maintenance-windows-create-and-manage-maint-windows]] == Create and manage {maint-windows} In **{project-settings} → {manage-app} → {maint-windows-app}** you can create, edit, and archive {maint-windows}. diff --git a/serverless/pages/project-settings/maps.asciidoc b/serverless/pages/project-settings/maps.asciidoc index c9a5400b8e..1dca2d1e90 100644 --- a/serverless/pages/project-settings/maps.asciidoc +++ b/serverless/pages/project-settings/maps.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-maps]] +[[maps]] = {maps-app} :description: Create maps from your geographical data. 
@@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {sec-badge} In **{project-settings} → {maps-app}** you can: @@ -23,7 +23,7 @@ In **{project-settings} → {maps-app}** you can: //// [discrete] -[[-serverless-maps-build-maps-with-multiple-layers-and-indices]] +[[maps-build-maps-with-multiple-layers-and-indices]] == Build maps with multiple layers and indices Use multiple layers and indices to show all your data in a single map. @@ -37,7 +37,7 @@ Go to **{project-settings} → {maps-app}** and click **Add layer**. To learn about specific types of layers, check out {kibana-ref}/heatmap-layer.html[Heat map layer], {kibana-ref}/tile-layer.html[Tile layer], and {kibana-ref}/vector-layer.html[Vector layer]. [discrete] -[[-serverless-maps-animate-spatial-temporal-data]] +[[maps-animate-spatial-temporal-data]] == Animate spatial temporal data Data comes to life with animation. @@ -53,19 +53,19 @@ image::images/timeslider_map.gif[An animated city map of Portland with changing To create this type of map, check out {kibana-ref}/asset-tracking-tutorial.html[Track, visualize, and alert assets in real time]. [discrete] -[[-serverless-maps-upload-geojson-files-and-shapefiles]] +[[maps-upload-geojson-files-and-shapefiles]] == Upload GeoJSON files and shapefiles Use **{maps-app}** to drag and drop your GeoJSON and shapefile data and then use them as layers in your map. Check out {kibana-ref}/import-geospatial-data.html[Import geospatial data]. [discrete] -[[-serverless-maps-embed-your-map-in-dashboards]] +[[maps-embed-your-map-in-dashboards]] == Embed your map in dashboards Viewing data from different angles provides better insights. Dimensions that are obscured in one visualization might be illuminated in another. -Add your map to a https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-dashboards[dashboard] and view your geospatial data alongside bar charts, pie charts, tag clouds, and more. 
+Add your map to a <> and view your geospatial data alongside bar charts, pie charts, tag clouds, and more. This choropleth map shows the density of non-emergency service requests in San Diego by council district. The map is embedded in a dashboard, so users can better understand when services are requested and gain insight into the top requested services. @@ -86,7 +86,7 @@ For example, use dark colors to symbolize areas with more web log traffic, and l //// [discrete] -[[-serverless-maps-focus-on-only-the-data-thats-important-to-you]] +[[maps-focus-on-only-the-data-thats-important-to-you]] == Focus on only the data that's important to you Search across the layers in your map to focus on just the data you want. diff --git a/serverless/pages/project-settings/project-and-management-settings.asciidoc b/serverless/pages/project-settings/project-and-management-settings.asciidoc index 6c2c4404b9..64725d5d5b 100644 --- a/serverless/pages/project-settings/project-and-management-settings.asciidoc +++ b/serverless/pages/project-settings/project-and-management-settings.asciidoc @@ -1,6 +1,3 @@ -[[-serverless-project-and-management-settings]] -= Project and management settings - :description: Learn about capabilities available in multiple serverless solutions. :keywords: serverless, observability, security, elasticsearch, overview @@ -9,16 +6,16 @@ preview:[] The documentation in this section describes shared capabilities that are available in multiple solutions. Look for the doc badge on each page to see if the page is valid for your solution: -* for the {es} solution -* for the {observability} solution -* for the {security} solution +* {es-badge} for the {es} solution +* {obs-badge} for the {observability} solution +* {sec-badge} for the {security} solution [IMPORTANT] ==== Some solutions provide versions of these capabilities tailored to your use case. 
Read the main solution docs to learn how to use those capabilities: -* https://www.elastic.co/docs/current/serverless/elasticsearch/what-is-elasticsearch-serverless[{es-serverless} docs] -* https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[{observability} serverless docs] -* https://www.elastic.co/docs/current/serverless/security/what-is-security-serverless[{security} serverless docs] +* <> +* <> +* <> ==== diff --git a/serverless/pages/project-settings/project-settings.asciidoc b/serverless/pages/project-settings/project-settings.asciidoc index 342d166b0a..d13ab982ca 100644 --- a/serverless/pages/project-settings/project-settings.asciidoc +++ b/serverless/pages/project-settings/project-settings.asciidoc @@ -1,9 +1,13 @@ -[[-serverless-project-settings]] +[[project-settings]] = Management settings :description: Manage your indices, data views, saved objects, settings, and more from a central location in Elastic. :keywords: serverless, management, overview +++++ +Management +++++ + preview:[] Go to **Project Settings** to manage your indices, data views, saved objects, settings, and more. @@ -15,76 +19,76 @@ To learn more about roles, refer to <>. |=== | Feature | Description | Available in -| https://www.elastic.co/docs/current/serverless/api-keys[API keys] +| <> | Create and manage keys that can send requests on behalf of users. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/security/asset-criticality[Asset criticality] +| <> | Bulk assign asset criticality to multiple entities by importing a text file. -| +| {sec-badge} -| https://www.elastic.co/docs/current/serverless/action-connectors[] +| <> | Create and manage reusable connectors for triggering actions. -| +| {es-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/data-views[] +| <> | Manage the fields in the data views that retrieve your data from {es}. 
-| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/security/entity-risk-scoring[Entity Risk Score] +| <> | Manage entity risk scoring, and preview risky entities. -| +| {sec-badge} -| https://www.elastic.co/docs/current/serverless/files[] +| <> | Manage files that are stored in {kib}. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/index-management[] +| <> | View index settings, mappings, and statistics and perform operations on indices. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/ingest-pipelines[] +| <> | Create and manage ingest pipelines that parse, transform, and enrich your data. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/logstash-pipelines[] +| <> | Create and manage {ls} pipelines that parse, transform, and enrich your data. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/machine-learning[] +| <> | View, export, and import your {anomaly-detect} and {dfanalytics} jobs and trained models. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/maintenance-windows[] +| <> | Suppress rule notifications for scheduled periods of time. -| +| {obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/maps[] +| <> | Create maps from your geographical data. -| +| {sec-badge} -| https://www.elastic.co/docs/current/serverless/reports[] +| <> | Manage and download reports such as CSV files generated from saved searches. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/rules[] +| <> | Create and manage rules that generate alerts. -| +| {es-badge} -| https://www.elastic.co/docs/current/serverless/saved-objects[] +| <> | Copy, edit, delete, import, and export your saved objects. These include dashboards, visualizations, maps, {data-sources}, and more. 
-| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/spaces[] +| <> | Organize your project and objects into multiple spaces. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/tags[] +| <> | Create, manage, and assign tags to your saved objects. -| +| {es-badge}{obs-badge}{sec-badge} -| https://www.elastic.co/docs/current/serverless/transforms[] +| <> | Use transforms to pivot existing {es} indices into summarized or entity-centric indices. -| +| {es-badge}{obs-badge}{sec-badge} |=== diff --git a/serverless/pages/project-settings/reports.asciidoc b/serverless/pages/project-settings/reports.asciidoc index 8c0739b184..505c38e051 100644 --- a/serverless/pages/project-settings/reports.asciidoc +++ b/serverless/pages/project-settings/reports.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-reports]] +[[reports]] = {reports-app} :description: View and manage generated reports. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} {kib} provides you with several options to share saved searches, dashboards, and visualizations. diff --git a/serverless/pages/project-settings/rules.asciidoc b/serverless/pages/project-settings/rules.asciidoc index d9db287a56..1b3dc2ff04 100644 --- a/serverless/pages/project-settings/rules.asciidoc +++ b/serverless/pages/project-settings/rules.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-rules]] +[[rules]] = {rules-app} :description: Alerting works by running checks on a schedule to detect conditions defined by a rule. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} In general, a rule consists of three parts: @@ -27,7 +27,7 @@ The following sections describe each part of the rule in more detail. */ //// [discrete] -[[-serverless-rules-conditions]] +[[rules-conditions]] == Conditions Each project type supports a specific set of rule types. 
@@ -40,7 +40,7 @@ image::images/es-query-rule-conditions.png[UI for defining rule conditions in an // NOTE: This is an autogenerated screenshot. Do not edit it directly. [discrete] -[[-serverless-rules-schedule]] +[[rules-schedule]] == Schedule // Rule schedules are defined as an interval between subsequent checks, and can range from a few seconds to months. @@ -56,7 +56,7 @@ The intervals of rule checks in {kib} are approximate. Their timing is affected ==== [discrete] -[[-serverless-rules-actions]] +[[rules-actions]] == Actions You can add one or more actions to your rule to generate notifications when its conditions are met. @@ -70,7 +70,7 @@ When defining actions in a rule, you specify: Each action uses a connector, which provides connection information for a {kib} service or third party integration, depending on where you want to send the notifications. The specific list of connectors that you can use in your rule varies by project type. -Refer to https://www.elastic.co/docs/current/serverless/action-connectors[]. +Refer to <>. // If no connectors exist, click **Add connector** to create one. @@ -130,7 +130,7 @@ When the rule detects the condition, it creates an alert containing the details //// [discrete] -[[-serverless-rules-action-variables]] +[[rules-action-variables]] === Action variables You can pass rule values to an action at the time a condition is detected. @@ -146,7 +146,7 @@ For more information about common action variables, refer to {kibana-ref}/rule-a // missing link [discrete] -[[-serverless-rules-alerts]] +[[rules-alerts]] == Alerts When checking for a condition, a rule might identify multiple occurrences of the condition. 
@@ -159,7 +159,7 @@ This means a separate email is sent for each server that exceeds the threshold w // ![{kib} tracks each detected condition as an alert and takes action on each alert](../images/alerting.svg) [discrete] -[[-serverless-rules-putting-it-all-together]] +[[rules-putting-it-all-together]] == Putting it all together A rule consists of conditions, actions, and a schedule. diff --git a/serverless/pages/project-settings/saved-objects.asciidoc b/serverless/pages/project-settings/saved-objects.asciidoc index 5254d1041d..f12290f4e8 100644 --- a/serverless/pages/project-settings/saved-objects.asciidoc +++ b/serverless/pages/project-settings/saved-objects.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-saved-objects]] +[[saved-objects]] = Saved objects :description: Manage your saved objects, including dashboards, visualizations, maps, {data-sources}, and more. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} To get started, go to **{project-settings} → {manage-app} → {saved-objects-app}**: @@ -32,7 +32,7 @@ applications they may not otherwise be authorized to access. //// [discrete] -[[-serverless-saved-objects-view-and-delete]] +[[saved-objects-view-and-delete]] == View and delete * To view and edit a saved object in its associated application, click the object title. @@ -40,7 +40,7 @@ applications they may not otherwise be authorized to access. * To delete one or more objects, select their checkboxes, and then click **Delete**. [discrete] -[[-serverless-saved-objects-import-and-export]] +[[saved-objects-import-and-export]] == Import and export Use import and export to move objects between different {kib} instances. @@ -56,7 +56,7 @@ TBD: Do these APIs exist for serverless? //// [discrete] -[[-serverless-saved-objects-import]] +[[saved-objects-import]] === Import Import multiple objects in a single operation. @@ -79,7 +79,7 @@ size of the file that you can import. 
//// [discrete] -[[-serverless-saved-objects-export]] +[[saved-objects-export]] === Export Export objects by selection or type. @@ -100,7 +100,7 @@ The `s //// [discrete] -[[-serverless-saved-objects-copy-to-other-spaces]] +[[saved-objects-copy-to-other-spaces]] == Copy to other spaces Copy saved objects and their related objects between spaces. diff --git a/serverless/pages/project-settings/spaces.asciidoc b/serverless/pages/project-settings/spaces.asciidoc index 54be346adf..22e2fc0888 100644 --- a/serverless/pages/project-settings/spaces.asciidoc +++ b/serverless/pages/project-settings/spaces.asciidoc @@ -1,9 +1,9 @@ -[[-serverless-spaces]] +[[spaces]] = Spaces :description: Organize your project and objects into multiple spaces. -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} Spaces enable you to organize your dashboards and other saved objects into meaningful categories. Once inside a space, you see only @@ -19,13 +19,13 @@ image::images/space-breadcrumb.png[Space breadcrumb] You can view and manage the spaces of a project from the **Spaces** page in **Management**. [discrete] -[[-serverless-spaces-required-permissions]] +[[spaces-required-permissions]] == Required permissions You must have an admin role on the project to manage its **Spaces**. [discrete] -[[-serverless-spaces-create-or-edit-a-space]] +[[spaces-create-or-edit-a-space]] == Create or edit a space You can have up to 100 spaces in a project. @@ -43,16 +43,16 @@ if you prefer to create spaces programmatically. 
ifeval::["{serverlessCustomRoles}" == "true"] [discrete] -[[-serverless-spaces-customize-access-to-space]] +[[spaces-customize-access-to-space]] == Customize access to space -Customizing access to a space is available for the following project types only: +Customizing access to a space is available for the following project types only: {es-badge} {sec-badge} -As an administrator, you can define custom roles with specific access to certain spaces and features in a project. Refer to https://www.elastic.co/docs/current/serverless/custom-roles[]. +As an administrator, you can define custom roles with specific access to certain spaces and features in a project. Refer to <>. endif::[] [discrete] -[[-serverless-spaces-delete-a-space]] +[[spaces-delete-a-space]] == Delete a space Deleting a space permanently removes the space and all of its contents. @@ -61,7 +61,7 @@ Find the space on the _Spaces_ page and click the trash icon in the Actions colu You can't delete the default space, but you can customize it to your liking. [discrete] -[[-serverless-spaces-move-saved-objects-between-spaces]] +[[spaces-move-saved-objects-between-spaces]] == Move saved objects between spaces -To move saved objects between spaces, you can https://www.elastic.co/docs/current/serverless/saved-objects[copy objects] or https://www.elastic.co/docs/current/serverless/saved-objects[export and import objects]. +To move saved objects between spaces, you can <> or <>. diff --git a/serverless/pages/project-settings/tags.asciidoc b/serverless/pages/project-settings/tags.asciidoc index e418ccca65..51abeee359 100644 --- a/serverless/pages/project-settings/tags.asciidoc +++ b/serverless/pages/project-settings/tags.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-tags]] +[[tags]] = {tags-app} :description: Use tags to categorize your saved objects, then filter for related objects based on shared tags. 
@@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} To get started, go to **{project-settings} → {manage-app} → {tags-app}**: @@ -35,7 +35,7 @@ from the global search. //// [discrete] -[[-serverless-tags-create-a-tag]] +[[tags-create-a-tag]] == Create a tag Create a tag to assign to your saved objects. @@ -47,7 +47,7 @@ The name cannot be longer than 50 characters. . Click **Create tag**. [discrete] -[[-serverless-tags-assign-a-tag-to-an-object]] +[[tags-assign-a-tag-to-an-object]] == Assign a tag to an object //// @@ -65,7 +65,7 @@ image::images/tag-assignment.png[Assign tags to saved objects] . Click **Save tag assignments**. [discrete] -[[-serverless-tags-delete-a-tag]] +[[tags-delete-a-tag]] == Delete a tag When you delete a tag, you remove it from all saved objects that use it. diff --git a/serverless/pages/project-settings/transforms.asciidoc b/serverless/pages/project-settings/transforms.asciidoc index a95f25fcf4..94e79b2524 100644 --- a/serverless/pages/project-settings/transforms.asciidoc +++ b/serverless/pages/project-settings/transforms.asciidoc @@ -1,4 +1,4 @@ -[[-serverless-transforms]] +[[transforms]] = {transforms-app} :description: Use transforms to pivot existing indices into summarized or entity-centric indices. @@ -6,7 +6,7 @@ preview:[] -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} {transforms-cap} enable you to convert existing {es} indices into summarized indices, which provide opportunities for new insights and analytics. 
@@ -28,7 +28,7 @@ For more information, check out: * {ref}/transform-limitations.html[Limitations] [discrete] -[[-serverless-transforms-create-and-manage-transforms]] +[[transforms-create-and-manage-transforms]] == Create and manage {transforms} In **{project-settings} → {manage-app} → {transforms-app}**, you can From 3d68b2d73134001a03f9ce540c97bfa413295474 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Tue, 29 Oct 2024 18:20:44 -0500 Subject: [PATCH 05/25] fix broken link --- serverless/pages/project-settings/maintenance-windows.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/serverless/pages/project-settings/maintenance-windows.asciidoc b/serverless/pages/project-settings/maintenance-windows.asciidoc index aa04c551cf..1650b7df28 100644 --- a/serverless/pages/project-settings/maintenance-windows.asciidoc +++ b/serverless/pages/project-settings/maintenance-windows.asciidoc @@ -59,7 +59,7 @@ image::images/create-maintenance-window-filter.png[The Create Maintenance Window ==== * You can select only a single category when you turn on filters. * Some rules are not affected by maintenance window filters because their alerts do not contain requisite data. -In particular, {kibana-ref}/kibana-alerts.html[{stack-monitor-app}], {kibana-ref}geo-alerting.html[tracking containment], {ml-docs}/ml-configuring-alerts.html[{anomaly-jobs} health], and {ref}/transform-alerts.html[transform health] rules are not affected by the filters. +In particular, {kibana-ref}/kibana-alerts.html[{stack-monitor-app}], {kibana-ref}/geo-alerting.html[tracking containment], {ml-docs}/ml-configuring-alerts.html[{anomaly-jobs} health], and {ref}/transform-alerts.html[transform health] rules are not affected by the filters. 
==== A maintenance window can have any one of the following statuses: From 67ad1970f19a2768b8ed554e66de2bb42bf5b27d Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Tue, 29 Oct 2024 19:14:10 -0500 Subject: [PATCH 06/25] clean up links --- serverless/index.asciidoc | 1 + .../devtools/debug-grok-expressions.asciidoc | 2 +- .../devtools/debug-painless-scripts.asciidoc | 2 +- .../developer-tools-troubleshooting.asciidoc | 20 ++++----- .../devtools/general-developer-tools.asciidoc | 8 ++-- .../profile-queries-and-aggregations.asciidoc | 4 +- .../run-api-requests-in-the-console.asciidoc | 28 ++++++------- .../apis-elasticsearch-conventions.asciidoc | 14 +++---- .../elasticsearch/apis-http-apis.asciidoc | 6 +-- .../apis-kibana-conventions.asciidoc | 4 +- .../clients-dot-net-getting-started.asciidoc | 24 +++++------ .../clients-go-getting-started.asciidoc | 28 ++++++------- .../clients-java-getting-started.asciidoc | 26 ++++++------ .../clients-nodejs-getting-started.asciidoc | 28 ++++++------- .../clients-php-getting-started.asciidoc | 26 ++++++------ .../clients-python-getting-started.asciidoc | 28 ++++++------- .../clients-ruby-getting-started.asciidoc | 34 +++++++-------- .../pages/elasticsearch/clients.asciidoc | 16 ++++---- .../elasticsearch-developer-tools.asciidoc | 4 +- .../explore-your-data-alerting.asciidoc | 10 ++--- ...lore-your-data-discover-your-data.asciidoc | 14 +++---- ...re-your-data-the-aggregations-api.asciidoc | 24 +++++------ ...alize-your-data-create-dashboards.asciidoc | 16 ++++---- ...e-your-data-create-visualizations.asciidoc | 34 +++++++-------- ...ore-your-data-visualize-your-data.asciidoc | 4 +- .../elasticsearch/explore-your-data.asciidoc | 10 ++--- .../pages/elasticsearch/get-started.asciidoc | 41 ++++++++++--------- ...your-data-ingest-data-through-api.asciidoc | 12 +++--- ...t-data-through-integrations-beats.asciidoc | 4 +- ...ugh-integrations-connector-client.asciidoc | 39 ++++++++++-------- 
...-through-integrations-connector-client.mdx | 8 ++-- ...ata-through-integrations-logstash.asciidoc | 16 ++++---- .../ingest-your-data-upload-file.asciidoc | 4 +- .../elasticsearch/ingest-your-data.asciidoc | 12 +++--- .../pages/elasticsearch/knn-search.asciidoc | 24 +++++------ .../elasticsearch/search-playground.asciidoc | 2 +- .../search-your-data-the-search-api.asciidoc | 2 +- .../elasticsearch/search-your-data.asciidoc | 6 +-- .../serverless-differences.asciidoc | 8 ++-- .../technical-preview-limitations.asciidoc | 4 +- .../what-is-elasticsearch-serverless.asciidoc | 16 ++++---- ...cess-to-org-from-existing-account.asciidoc | 2 +- .../manage-access-to-org-user-roles.asciidoc | 14 +++---- .../general/manage-access-to-org.asciidoc | 6 +-- ...manage-billing-check-subscription.asciidoc | 2 +- .../general/manage-billing-history.asciidoc | 2 +- .../manage-billing-monitor-usage.asciidoc | 4 +- .../manage-billing-pricing-model.asciidoc | 6 +-- .../manage-billing-stop-project.asciidoc | 2 +- .../pages/general/manage-billing.asciidoc | 12 +++--- serverless/pages/general/manage-org.asciidoc | 8 ++-- .../manage-your-project-rest-api.asciidoc | 26 ++++++------ .../general/manage-your-project.asciidoc | 20 ++++----- .../pages/general/service-status.asciidoc | 4 +- serverless/pages/general/sign-up.asciidoc | 14 +++---- .../pages/general/user-profile.asciidoc | 8 ++-- .../pages/general/what-is-serverless.asciidoc | 8 ++-- .../project-settings/custom-roles.asciidoc | 26 ++++++------ .../pages/project-settings/files.asciidoc | 2 +- .../project-settings/maintenance-windows.mdx | 2 +- .../pages/project-settings/maps.asciidoc | 2 +- .../project-settings.asciidoc | 6 +-- .../pages/welcome-to-serverless.asciidoc | 10 ++--- 63 files changed, 403 insertions(+), 396 deletions(-) diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index 0f6b391289..092edd47fb 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -227,6 +227,7 @@ 
include::{observability-serverless}/alerting/create-failed-transaction-rate-thre include::{observability-serverless}/alerting/create-inventory-threshold-alert-rule.asciidoc[leveloffset=+4] include::{observability-serverless}/alerting/create-latency-threshold-alert-rule.asciidoc[leveloffset=+4] include::{observability-serverless}/alerting/create-slo-burn-rate-alert-rule.asciidoc[leveloffset=+4] +include::{observability-serverless}/alerting/synthetic-monitor-status-alert.asciidoc[leveloffset=+4] include::{observability-serverless}/alerting/aggregation-options.asciidoc[leveloffset=+3] include::{observability-serverless}/alerting/rate-aggregation.asciidoc[leveloffset=+4] include::{observability-serverless}/alerting/view-alerts.asciidoc[leveloffset=+3] diff --git a/serverless/pages/devtools/debug-grok-expressions.asciidoc b/serverless/pages/devtools/debug-grok-expressions.asciidoc index d65939e661..675f0342f2 100644 --- a/serverless/pages/devtools/debug-grok-expressions.asciidoc +++ b/serverless/pages/devtools/debug-grok-expressions.asciidoc @@ -1,4 +1,4 @@ -[[debug-grok-expressions]] +[[devtools-debug-grok-expressions]] = Grok Debugger :description: Build and debug grok patterns before you use them in your data processing pipelines. diff --git a/serverless/pages/devtools/debug-painless-scripts.asciidoc b/serverless/pages/devtools/debug-painless-scripts.asciidoc index a832b84345..5e96e536be 100644 --- a/serverless/pages/devtools/debug-painless-scripts.asciidoc +++ b/serverless/pages/devtools/debug-painless-scripts.asciidoc @@ -1,4 +1,4 @@ -[[debug-painless-scripts]] +[[devtools-debug-painless-scripts]] = Painless Lab :description: Use our interactive code editor to test and debug Painless scripts in real-time. 
diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc index 457fe0229d..4a01078719 100644 --- a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc +++ b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc @@ -1,4 +1,4 @@ -[[dev-tools-troubleshooting]] +[[devtools-dev-tools-troubleshooting]] = Troubleshooting :description: Troubleshoot searches. @@ -11,7 +11,7 @@ or results in an unexpected order. This guide describes how to troubleshoot searches. [discrete] -[[dev-tools-troubleshooting-ensure-the-data-stream-index-or-alias-exists]] +[[devtools-dev-tools-troubleshooting-ensure-the-data-stream-index-or-alias-exists]] == Ensure the data stream, index, or alias exists Elasticsearch returns an `index_not_found_exception` when the data stream, index @@ -44,7 +44,7 @@ GET /my-alias/_search?ignore_unavailable=true ---- [discrete] -[[dev-tools-troubleshooting-ensure-the-data-stream-or-index-contains-data]] +[[devtools-dev-tools-troubleshooting-ensure-the-data-stream-or-index-contains-data]] == Ensure the data stream or index contains data When a search request returns no hits, the data stream or index may contain no @@ -71,7 +71,7 @@ configured with the correct time field. ==== [discrete] -[[dev-tools-troubleshooting-check-that-the-field-exists-and-its-capabilities]] +[[devtools-dev-tools-troubleshooting-check-that-the-field-exists-and-its-capabilities]] == Check that the field exists and its capabilities Querying a field that does not exist will not return any results. @@ -115,7 +115,7 @@ searchable and aggregatable. <3> The field is aggregatable in this index. [discrete] -[[dev-tools-troubleshooting-check-the-fields-mappings]] +[[devtools-dev-tools-troubleshooting-check-the-fields-mappings]] == Check the field's mappings A field's capabilities are determined by its {ref}/mapping.html[mapping]. 
@@ -143,7 +143,7 @@ GET /my-index-000001/_analyze To change the mapping of an existing field use the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-mapping-1[**Update mapping API**]. [discrete] -[[dev-tools-troubleshooting-check-the-fields-values]] +[[devtools-dev-tools-troubleshooting-check-the-fields-values]] == Check the field's values Use the `exists` query to check whether there are @@ -203,7 +203,7 @@ If the field does not return any values, check the data ingestion process. The field may have a different name. [discrete] -[[dev-tools-troubleshooting-check-the-latest-value]] +[[devtools-dev-tools-troubleshooting-check-the-latest-value]] == Check the latest value For time-series data, confirm there is non-filtered data within the attempted @@ -218,7 +218,7 @@ GET /my-index-000001/_search?sort=@timestamp:desc&size=1 ---- [discrete] -[[dev-tools-troubleshooting-validate-explain-and-profile-queries]] +[[devtools-dev-tools-troubleshooting-validate-explain-and-profile-queries]] == Validate, explain, and profile queries When a query returns unexpected results, Elasticsearch offers several tools to @@ -260,7 +260,7 @@ GET /my-index-000001/_explain/0 The {ref}/search-profile.html[**Profile API**] provides detailed timing information about a search request. For a visual representation of the results, use the -<>. +<>. [NOTE] ==== @@ -270,7 +270,7 @@ You can now copy the query sent to {es} for further analysis in Console. 
==== [discrete] -[[dev-tools-troubleshooting-check-index-settings]] +[[devtools-dev-tools-troubleshooting-check-index-settings]] == Check index settings Index settings diff --git a/serverless/pages/devtools/general-developer-tools.asciidoc b/serverless/pages/devtools/general-developer-tools.asciidoc index ac24b5aa11..bcc08df6e0 100644 --- a/serverless/pages/devtools/general-developer-tools.asciidoc +++ b/serverless/pages/devtools/general-developer-tools.asciidoc @@ -6,19 +6,19 @@ preview:[] |=== | Feature | Description | Available in -| <> +| <> | Interact with Elastic REST APIs. | {es-badge}{obs-badge}{sec-badge} -| <> +| <> | Inspect and analyze your search queries. | {es-badge}{obs-badge}{sec-badge} -| <> +| <> | Build and debug grok patterns before you use them in your data processing pipelines. | {es-badge}{obs-badge}{sec-badge} -| <> +| <> | Use an interactive code editor to test and debug Painless scripts in real time. | {obs-badge}{sec-badge} |=== diff --git a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc index 23d1ea9e75..0773a029dd 100644 --- a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc +++ b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc @@ -1,4 +1,4 @@ -[[profile-queries-and-aggregations]] +[[devtools-profile-queries-and-aggregations]] = Search Profiler :description: Diagnose and debug poorly performing search queries. @@ -59,7 +59,7 @@ breakdown of low-level methods. For more information, refer to {ref}/search-profile.html#profiling-queries[Profiling queries] in the {es} documentation. 
[discrete] -[[profile-queries-and-aggregations-filter-for-an-index-or-type]] +[[devtools-profile-queries-and-aggregations-filter-for-an-index-or-type]] == Filter for an index or type By default, all queries executed by the **{searchprofiler}** are sent diff --git a/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc b/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc index f43964a2d6..d17ed74dc2 100644 --- a/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc +++ b/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc @@ -1,4 +1,4 @@ -[[run-api-requests-in-the-console]] +[[devtools-run-api-requests-in-the-console]] = Console :description: Use the Console to interact with Elastic REST APIs. @@ -20,7 +20,7 @@ To go to **Console**, find **Dev Tools** in the navigation menu or use the globa You can also find Console directly on your Elasticsearch serverless project pages, where you can expand it from the footer. This Console, called **Persistent Console**, has the same capabilities and shares the same history as the Console in **Dev Tools**. [discrete] -[[run-api-requests-in-the-console-write-requests]] +[[devtools-run-api-requests-in-the-console-write-requests]] == Write requests **Console** understands commands in a cURL-like syntax. @@ -52,16 +52,16 @@ curl "${ES_URL}/_search" \ ---- [discrete] -[[run-api-requests-in-the-console-autocomplete]] +[[devtools-run-api-requests-in-the-console-autocomplete]] === Autocomplete When you're typing a command, **Console** makes context-sensitive suggestions. These suggestions show you the parameters for each API and speed up your typing. -You can configure your preferences for autocomplete in the <>. +You can configure your preferences for autocomplete in the <>. 
[discrete] -[[run-api-requests-in-the-console-comments]] +[[devtools-run-api-requests-in-the-console-comments]] === Comments You can write comments or temporarily disable parts of a request by using double forward slashes (`//`) or pound (`#`) signs to create single-line comments. @@ -96,7 +96,7 @@ GET /_search ---- [discrete] -[[run-api-requests-in-the-console-variables]] +[[devtools-run-api-requests-in-the-console-variables]] === Variables Select **Variables** to create, edit, and delete variables. @@ -124,7 +124,7 @@ object by removing nearby quotes instead of a string with surrounding quotes. Tr quotes overwrite this default behavior and enforce simple replacement as a string. [discrete] -[[run-api-requests-in-the-console-auto-formatting]] +[[devtools-run-api-requests-in-the-console-auto-formatting]] === Auto-formatting The auto-formatting @@ -132,7 +132,7 @@ capability can help you format requests to be more readable. Select one or more want to format, open the contextual menu, and then select **Auto indent**. [discrete] -[[run-api-requests-in-the-console-keyboard-shortcuts]] +[[devtools-run-api-requests-in-the-console-keyboard-shortcuts]] === Keyboard shortcuts **Go to line number**: `Ctrl/Cmd` + `L` @@ -154,14 +154,14 @@ want to format, open the contextual menu, and then select **Auto indent**. **Navigate items in autocomplete menu**: `↓` + `↑` [discrete] -[[run-api-requests-in-the-console-view-api-docs]] +[[devtools-run-api-requests-in-the-console-view-api-docs]] === View API docs To view the documentation for an API endpoint, select the request, then open the contextual menu and select _Open API reference_. [discrete] -[[run-api-requests-in-the-console-run-requests]] +[[devtools-run-api-requests-in-the-console-run-requests]] == Run requests When you're ready to submit the request, select the play button. @@ -178,7 +178,7 @@ when you're debugging an issue or trying query combinations in multiple scenarios. 
[discrete] -[[run-api-requests-in-the-console-import-and-export-requests]] +[[devtools-run-api-requests-in-the-console-import-and-export-requests]] == Import and export requests You can export requests: @@ -194,7 +194,7 @@ When importing a TXT file containing Console requests, the current content of th When running copied requests from an external environment, you'll need to add https://www.elastic.co/docs/api/doc/serverless/authentication[authentication information] to the request. [discrete] -[[run-api-requests-in-the-console-get-your-request-history]] +[[devtools-run-api-requests-in-the-console-get-your-request-history]] == Get your request history _Console_ maintains a list of the last 500 requests that you tried to execute. @@ -203,13 +203,13 @@ To view them, open the _History_ tab. You can run a request from your history again by selecting the request and clicking **Add and run**. If you want to add it back to the Console input panel without running it yet, click **Add** instead. It is added to the editor at the current cursor position. [discrete] -[[run-api-requests-in-the-console-configure-console-settings]] +[[devtools-run-api-requests-in-the-console-configure-console-settings]] == Configure Console settings Go to the **Config** tab of **Console** to customize its display, autocomplete, and accessibility settings. [discrete] -[[run-api-requests-in-the-console-disable-console]] +[[devtools-run-api-requests-in-the-console-disable-console]] == Disable Console You can disable the persistent console that shows in the footer of your {es} project pages. To do that, go to **Management** > **Advanced Settings**, and turn off the `devTools:enablePersistentConsole` setting. 
diff --git a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc index e92fcd0509..27b5167fc7 100644 --- a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc +++ b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc @@ -1,4 +1,4 @@ -[[api-conventions]] +[[elasticsearch-api-conventions]] = Elasticsearch API conventions :description: The {es} REST APIs have conventions for headers and request bodies. @@ -17,14 +17,14 @@ GET _cat/indices?v=true Check out https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[]. [discrete] -[[api-conventions-request-headers]] +[[elasticsearch-api-conventions-request-headers]] == Request headers When you call {es} APIs outside of the Console, you must provide a request header. The {es} APIs support the `Authorization`, `Content-Type`, and `X-Opaque-Id` headers. [discrete] -[[api-conventions-authorization]] +[[elasticsearch-api-conventions-authorization]] === Authorization {es} APIs use key-based authentication. @@ -37,10 +37,10 @@ curl -X GET "${ES_URL}/_cat/indices?v=true" \ -H "Authorization: ApiKey ${API_KEY}" ---- -To get API keys or the Elasticsearch Endpoint (`${ES_URL}`) for a project, refer to <>. +To get API keys or the Elasticsearch Endpoint (`${ES_URL}`) for a project, refer to <>. [discrete] -[[api-conventions-content-type]] +[[elasticsearch-api-conventions-content-type]] === Content-type The type of the content sent in a request body must be specified using the `Content-Type` header. @@ -91,7 +91,7 @@ For example, a `traceparent` value of `00-0af7651916cd43dd8448eb211c80319c-b7ad6 //// [discrete] -[[api-conventions-x-opaque-id]] +[[elasticsearch-api-conventions-x-opaque-id]] === X-Opaque-Id You can pass an `X-Opaque-Id` HTTP header to track the origin of a request in {es} logs and tasks. 
@@ -142,7 +142,7 @@ Don't generate a unique `X-Opaque-Id` header for every request. Too many unique `X-Opaque-Id` values can prevent {es} from deduplicating warnings in the deprecation logs. [discrete] -[[api-conventions-request-bodies]] +[[elasticsearch-api-conventions-request-bodies]] == Request bodies A number of {es} APIs with GET operations--most notably the search API--support a request body. diff --git a/serverless/pages/elasticsearch/apis-http-apis.asciidoc b/serverless/pages/elasticsearch/apis-http-apis.asciidoc index f2ebbe4695..61628e0016 100644 --- a/serverless/pages/elasticsearch/apis-http-apis.asciidoc +++ b/serverless/pages/elasticsearch/apis-http-apis.asciidoc @@ -1,4 +1,4 @@ -[[http-apis]] +[[elasticsearch-http-apis]] = REST APIs :description: {es} and {kib} expose REST APIs that can be called directly to configure and access {stack} features. @@ -6,6 +6,6 @@ preview:[] -* <> -* <> +* <> +* <> * https://www.elastic.co/docs/api/[API Reference]: Explore the reference information for Elastic Serverless REST APIs diff --git a/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc b/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc index 443521e70c..67e49c4a81 100644 --- a/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc +++ b/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc @@ -1,4 +1,4 @@ -[[kibana-api-conventions]] +[[elasticsearch-kibana-api-conventions]] = Management API conventions :description: The Management APIs for {serverless-short} have request header conventions. @@ -32,7 +32,7 @@ GET kbn:/api/data_views Check out https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[]. [discrete] -[[kibana-api-conventions-request-headers]] +[[elasticsearch-kibana-api-conventions-request-headers]] == Request headers When you call Management APIs outside of the Console, you must provide a request header. 
diff --git a/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc index faff9202b2..9a9bf61c33 100644 --- a/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc @@ -1,4 +1,4 @@ -[[dot-net-client-getting-started]] +[[elasticsearch-dot-net-client-getting-started]] = Get started with the serverless .NET client :description: Set up and use the .NET client for {es3}. @@ -11,13 +11,13 @@ This page guides you through the installation process of the {es} operations with it. [discrete] -[[dot-net-client-getting-started-requirements]] +[[elasticsearch-dot-net-client-getting-started-requirements]] == Requirements * .NET Core, .NET 5+ or .NET Framework (4.6.1 and higher). [discrete] -[[dot-net-client-getting-started-installation]] +[[elasticsearch-dot-net-client-getting-started-installation]] == Installation You can install the .NET client with the following command: @@ -28,7 +28,7 @@ dotnet add package Elastic.Clients.Elasticsearch.Serverless ---- [discrete] -[[dot-net-client-getting-started-initialize-the-client]] +[[elasticsearch-dot-net-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -38,17 +38,17 @@ Initialize the client using your API key and Elasticsearch Endpoint: var client = new ElasticsearchClient("", new ApiKey("")); ---- -To get API keys or the Elasticsearch Endpoint for a project, see <>. +To get API keys or the Elasticsearch Endpoint for a project, see <>. [discrete] -[[dot-net-client-getting-started-using-the-api]] +[[elasticsearch-dot-net-client-getting-started-using-the-api]] == Using the API After you've initialized the client, you can create an index and start ingesting documents. 
[discrete] -[[dot-net-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-dot-net-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents The following is an example of creating a `my_index` index: @@ -73,7 +73,7 @@ var response = await client.IndexAsync(doc, "my_index"); ---- [discrete] -[[dot-net-client-getting-started-getting-documents]] +[[elasticsearch-dot-net-client-getting-started-getting-documents]] === Getting documents You can get documents by using the following code: @@ -89,7 +89,7 @@ if (response.IsValidResponse) ---- [discrete] -[[dot-net-client-getting-started-searching]] +[[elasticsearch-dot-net-client-getting-started-searching]] === Searching This is how you can create a single match query with the .NET client: @@ -112,7 +112,7 @@ if (response.IsValidResponse) ---- [discrete] -[[dot-net-client-getting-started-updating-a-document]] +[[elasticsearch-dot-net-client-getting-started-updating-a-document]] === Updating a document This is how you can update a document, for example to add a new field: @@ -126,7 +126,7 @@ var response = await client.UpdateAsync("my_index", 1, u => u ---- [discrete] -[[dot-net-client-getting-started-deleting-a-document]] +[[elasticsearch-dot-net-client-getting-started-deleting-a-document]] === Deleting a document [source,net] @@ -135,7 +135,7 @@ var response = await client.DeleteAsync("my_index", 1); ---- [discrete] -[[dot-net-client-getting-started-deleting-an-index]] +[[elasticsearch-dot-net-client-getting-started-deleting-an-index]] === Deleting an index [source,net] diff --git a/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc index 9d3e782e6f..b5d80b40ae 100644 --- a/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc @@ -1,4 +1,4 @@ 
-[[go-client-getting-started]] +[[elasticsearch-go-client-getting-started]] = Get started with the serverless Go Client :description: Set up and use the Go client for {es3}. @@ -11,17 +11,17 @@ client for {es3}, shows you how to initialize the client, and how to perform bas {es} operations with it. [discrete] -[[go-client-getting-started-requirements]] +[[elasticsearch-go-client-getting-started-requirements]] == Requirements * Go 1.20 or higher installed on your system. [discrete] -[[go-client-getting-started-installation]] +[[elasticsearch-go-client-getting-started-installation]] == Installation [discrete] -[[go-client-getting-started-using-the-command-line]] +[[elasticsearch-go-client-getting-started-using-the-command-line]] === Using the command line You can install the Go client with the following @@ -33,7 +33,7 @@ go get -u github.com/elastic/elasticsearch-serverless-go@latest ---- [discrete] -[[go-client-getting-started-imports]] +[[elasticsearch-go-client-getting-started-imports]] == Imports The following snippets use these imports: @@ -54,7 +54,7 @@ import ( ---- [discrete] -[[go-client-getting-started-initialize-the-client]] +[[elasticsearch-go-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -70,10 +70,10 @@ if err != nil { } ---- -To get API keys or the Elasticsearch Endpoint for a project, see <>. +To get API keys or the Elasticsearch Endpoint for a project, see <>. [discrete] -[[go-client-getting-started-using-the-api]] +[[elasticsearch-go-client-getting-started-using-the-api]] == Using the API After you've initialized the client, you can start ingesting documents. You can @@ -81,7 +81,7 @@ use the `bulk` API for this. This API enables you to index, update, and delete several documents in one request. 
[discrete] -[[go-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-go-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents You can call the `bulk` API with a body parameter, an array of hashes that @@ -130,7 +130,7 @@ response object. You can access the body values directly as seen on the previous example with `bulkRes`. [discrete] -[[go-client-getting-started-getting-documents]] +[[elasticsearch-go-client-getting-started-getting-documents]] === Getting documents You can get documents by using the following code: @@ -149,7 +149,7 @@ fmt.Printf("Get book: %#v\n", book) ---- [discrete] -[[go-client-getting-started-searching]] +[[elasticsearch-go-client-getting-started-searching]] === Searching Now that some data is available, you can search your documents using the @@ -177,7 +177,7 @@ fmt.Printf("Search books: %#v\n", bookSearch) ---- [discrete] -[[go-client-getting-started-updating-a-document]] +[[elasticsearch-go-client-getting-started-updating-a-document]] === Updating a document You can call the `Update` API to update a document, in this example updating the @@ -202,7 +202,7 @@ if updateRes.Result == result.Updated { ---- [discrete] -[[go-client-getting-started-deleting-a-document]] +[[elasticsearch-go-client-getting-started-deleting-a-document]] === Deleting a document You can call the `Delete` API to delete a document: @@ -220,7 +220,7 @@ if deleteRes.Result == result.Deleted { ---- [discrete] -[[go-client-getting-started-deleting-an-index]] +[[elasticsearch-go-client-getting-started-deleting-an-index]] === Deleting an index [source,go] diff --git a/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc index d04d1a4291..317644ed25 100644 --- a/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc 
@@ -1,4 +1,4 @@ -[[java-client-getting-started]] +[[elasticsearch-java-client-getting-started]] = Get started with the serverless Java client :description: Set up and use the Java client for {es3}. @@ -11,7 +11,7 @@ client for {es3}, shows you how to initialize the client, and how to perform bas {es} operations with it. [discrete] -[[java-client-getting-started-requirements]] +[[elasticsearch-java-client-getting-started-requirements]] == Requirements * Java 8 or later. @@ -20,14 +20,14 @@ your application classes with the {es} API. The examples below show usage with Jackson. [discrete] -[[java-client-getting-started-installation]] +[[elasticsearch-java-client-getting-started-installation]] == Installation You can add the Java client to your Java project using either Gradle or Maven. [discrete] -[[java-client-getting-started-using-gradle]] +[[elasticsearch-java-client-getting-started-using-gradle]] === Using Gradle You can install the Java client as a Gradle dependency: @@ -41,7 +41,7 @@ dependencies { ---- [discrete] -[[java-client-getting-started-using-maven]] +[[elasticsearch-java-client-getting-started-using-maven]] === Using Maven You can install the Java client as a Maven dependency, add @@ -69,7 +69,7 @@ the following to the `pom.xml` of your project: ---- [discrete] -[[java-client-getting-started-initialize-the-client]] +[[elasticsearch-java-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -96,16 +96,16 @@ ElasticsearchTransport transport = new RestClientTransport( ElasticsearchClient esClient = new ElasticsearchClient(transport); ---- -To get API keys or the Elasticsearch Endpoint for a project, see <>. +To get API keys or the Elasticsearch Endpoint for a project, see <>. 
[discrete] -[[java-client-getting-started-using-the-api]] +[[elasticsearch-java-client-getting-started-using-the-api]] == Using the API After you initialized the client, you can start ingesting documents. [discrete] -[[java-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-java-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents The following is an example of indexing a document, here a `Product` application @@ -125,7 +125,7 @@ logger.info("Indexed with version " + response.version()); ---- [discrete] -[[java-client-getting-started-searching]] +[[elasticsearch-java-client-getting-started-searching]] === Searching Now that some data is available, you can search your documents using the @@ -157,7 +157,7 @@ actual classes representing a query. `Product` application objects instead of raw JSON. [discrete] -[[java-client-getting-started-updating]] +[[elasticsearch-java-client-getting-started-updating]] === Updating You can update your documents using the `update` API: @@ -175,7 +175,7 @@ esClient.update(u -> u ---- [discrete] -[[java-client-getting-started-delete]] +[[elasticsearch-java-client-getting-started-delete]] === Delete You can also delete documents: @@ -186,7 +186,7 @@ esClient.delete(d -> d.index("products").id("bk-1")); ---- [discrete] -[[java-client-getting-started-deleting-an-index]] +[[elasticsearch-java-client-getting-started-deleting-an-index]] === Deleting an index [source,java] diff --git a/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc index 7895e007a9..cdf3b6234b 100644 --- a/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc @@ -1,4 +1,4 @@ -[[nodejs-client-getting-started]] +[[elasticsearch-nodejs-client-getting-started]] = Get started with the serverless Node.js 
client :description: Set up and use the Node.js client for {es3}. @@ -11,17 +11,17 @@ client for {es3}, shows you how to initialize the client, and how to perform bas {es} operations with it. [discrete] -[[nodejs-client-getting-started-requirements]] +[[elasticsearch-nodejs-client-getting-started-requirements]] == Requirements * Node.js 16 or higher installed on your system. [discrete] -[[nodejs-client-getting-started-installation]] +[[elasticsearch-nodejs-client-getting-started-installation]] == Installation [discrete] -[[nodejs-client-getting-started-using-the-command-line]] +[[elasticsearch-nodejs-client-getting-started-using-the-command-line]] === Using the command line You can install the Node.js client with the following @@ -33,7 +33,7 @@ npm install @elastic/elasticsearch-serverless ---- [discrete] -[[nodejs-client-getting-started-initialize-the-client]] +[[elasticsearch-nodejs-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -47,10 +47,10 @@ const client = new Client({ }) ---- -To get API keys or the URL for a project, see <>. +To get API keys or the URL for a project, see <>. [discrete] -[[nodejs-client-getting-started-using-the-api]] +[[elasticsearch-nodejs-client-getting-started-using-the-api]] == Using the API After you've initialized the client, you can start ingesting documents. @@ -58,7 +58,7 @@ You can use the `bulk` API for this. This API enables you to index, update, and delete several documents in one request. 
[discrete] -[[nodejs-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-nodejs-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents You can call the `bulk` helper API with a list of documents and a handler for @@ -93,7 +93,7 @@ const result = await client.helpers.bulk({ ---- [discrete] -[[nodejs-client-getting-started-getting-documents]] +[[elasticsearch-nodejs-client-getting-started-getting-documents]] === Getting documents You can get documents by using the following code: @@ -107,7 +107,7 @@ await client.get({ ---- [discrete] -[[nodejs-client-getting-started-searching]] +[[elasticsearch-nodejs-client-getting-started-searching]] === Searching Now that some data is available, you can search your documents using the `search` API: @@ -126,7 +126,7 @@ console.log(result.hits.hits) ---- [discrete] -[[nodejs-client-getting-started-updating-a-document]] +[[elasticsearch-nodejs-client-getting-started-updating-a-document]] === Updating a document You can call the `update` API to update a document: @@ -144,7 +144,7 @@ await client.update({ ---- [discrete] -[[nodejs-client-getting-started-deleting-a-document]] +[[elasticsearch-nodejs-client-getting-started-deleting-a-document]] === Deleting a document You can call the `delete` API to delete a document: @@ -158,7 +158,7 @@ await client.delete({ ---- [discrete] -[[nodejs-client-getting-started-deleting-an-index]] +[[elasticsearch-nodejs-client-getting-started-deleting-an-index]] === Deleting an index [source,js] @@ -167,7 +167,7 @@ await client.indices.delete({ index: 'books' }) ---- [discrete] -[[nodejs-client-getting-started-typescript]] +[[elasticsearch-nodejs-client-getting-started-typescript]] == TypeScript The Node.js client is implemented in TypeScript. 
IDEs that support diff --git a/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc index 94037b5d1b..6fd5e0ea15 100644 --- a/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc @@ -1,4 +1,4 @@ -[[php-client-getting-started]] +[[elasticsearch-php-client-getting-started]] = Get started with the serverless PHP client :description: Set up and use the PHP client for {es3}. @@ -11,17 +11,17 @@ PHP client for {es3}, shows you how to initialize the client, and how to perform {es} operations with it. [discrete] -[[php-client-getting-started-requirements]] +[[elasticsearch-php-client-getting-started-requirements]] == Requirements * PHP 8.0 or higher installed on your system. [discrete] -[[php-client-getting-started-installation]] +[[elasticsearch-php-client-getting-started-installation]] == Installation [discrete] -[[php-client-getting-started-using-the-command-line]] +[[elasticsearch-php-client-getting-started-using-the-command-line]] === Using the command line You can install the PHP client using @@ -33,7 +33,7 @@ composer require elastic/elasticsearch-serverless ---- [discrete] -[[php-client-getting-started-initialize-the-client]] +[[elasticsearch-php-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -50,10 +50,10 @@ $client = ClientBuilder::create() ->build(); ---- -To get API keys or the Elasticsearch Endpoint for a project, see <>. +To get API keys or the Elasticsearch Endpoint for a project, see <>. [discrete] -[[php-client-getting-started-using-the-api]] +[[elasticsearch-php-client-getting-started-using-the-api]] == Using the API After you've initialized the client, you can start ingesting documents. You can @@ -61,7 +61,7 @@ use the `bulk` API for this. 
This API enables you to index, update, and delete several documents in one request. [discrete] -[[php-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-php-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents You can call the `bulk` API with a body parameter, an array of actions (index) @@ -138,7 +138,7 @@ var_dump($response->asBool()); // true if HTTP response code between 200 and 3 ---- [discrete] -[[php-client-getting-started-getting-documents]] +[[elasticsearch-php-client-getting-started-getting-documents]] === Getting documents You can get documents by using the following code: @@ -149,7 +149,7 @@ $response = $client->get(index: "books", id: $id); ---- [discrete] -[[php-client-getting-started-searching]] +[[elasticsearch-php-client-getting-started-searching]] === Searching You can search your documents using the `search` API: @@ -170,7 +170,7 @@ https://www.elastic.co/docs/api/doc/elasticsearch-serverless/group/endpoint-sear docs. 
[discrete] -[[php-client-getting-started-updating-documents]] +[[elasticsearch-php-client-getting-started-updating-documents]] === Updating documents You can call the `update` API to update a document: @@ -185,7 +185,7 @@ printf("Operation result: %s\n", $response['result']); # You get 'updated' as a ---- [discrete] -[[php-client-getting-started-deleting-documents]] +[[elasticsearch-php-client-getting-started-deleting-documents]] === Deleting documents You can call the `delete` API to delete a document: @@ -198,7 +198,7 @@ printf("Operation result: %s\n", $response['result']); # You get "deleted" a as ---- [discrete] -[[php-client-getting-started-deleting-an-index]] +[[elasticsearch-php-client-getting-started-deleting-an-index]] === Deleting an index You can delete an entire index as follows: diff --git a/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc index 608ff6df12..ed26a37ac6 100644 --- a/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc @@ -1,4 +1,4 @@ -[[python-client-getting-started]] +[[elasticsearch-python-client-getting-started]] = Get started with the serverless Python client :description: Set up and use the Python client for {es3}. @@ -11,24 +11,24 @@ client for {es3}, shows you how to initialize the client, and how to perform bas {es} operations with it. [discrete] -[[python-client-getting-started-requirements]] +[[elasticsearch-python-client-getting-started-requirements]] == Requirements * Python 3.7 or higher * https://pip.pypa.io/en/stable/[`pip`] [discrete] -[[python-client-getting-started-documentation]] +[[elasticsearch-python-client-getting-started-documentation]] == Documentation Find the full documentation for the Python client on https://elasticsearch-serverless-python.readthedocs.io/en/latest/[readthedocs]. 
[discrete] -[[python-client-getting-started-installation]] +[[elasticsearch-python-client-getting-started-installation]] == Installation [discrete] -[[python-client-getting-started-using-the-command-line]] +[[elasticsearch-python-client-getting-started-using-the-command-line]] === Using the command line You can install the Python client with the following @@ -40,7 +40,7 @@ python -m pip install elasticsearch-serverless ---- [discrete] -[[python-client-getting-started-initialize-the-client]] +[[elasticsearch-python-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -55,10 +55,10 @@ client = Elasticsearch( ) ---- -To get API keys or the Elasticsearch Endpoint for a project, see <>. +To get API keys or the Elasticsearch Endpoint for a project, see <>. [discrete] -[[python-client-getting-started-using-the-api]] +[[elasticsearch-python-client-getting-started-using-the-api]] == Using the API After you've initialized the client, you can start ingesting documents. You can use @@ -66,7 +66,7 @@ the `bulk` API for this. This API enables you to index, update, and delete sever documents in one request. 
[discrete] -[[python-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-python-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents You can call the `bulk` API with a body parameter, an array of hashes that @@ -92,7 +92,7 @@ client.bulk( ---- [discrete] -[[python-client-getting-started-getting-documents]] +[[elasticsearch-python-client-getting-started-getting-documents]] === Getting documents You can get documents by using the following code: @@ -104,7 +104,7 @@ print(response.body) ---- [discrete] -[[python-client-getting-started-searching]] +[[elasticsearch-python-client-getting-started-searching]] === Searching Now that some data is available, you can search your documents using the @@ -123,7 +123,7 @@ for hit in response["hits"]["hits"]: ---- [discrete] -[[python-client-getting-started-updating-a-document]] +[[elasticsearch-python-client-getting-started-updating-a-document]] === Updating a document You can call the `update` API to update a document: @@ -137,7 +137,7 @@ client.update(index="books", id="2", doc={ ---- [discrete] -[[python-client-getting-started-deleting-a-document]] +[[elasticsearch-python-client-getting-started-deleting-a-document]] === Deleting a document You can call the `delete` API to delete a document: @@ -148,7 +148,7 @@ client.delete(index="books", id="3") ---- [discrete] -[[python-client-getting-started-deleting-an-index]] +[[elasticsearch-python-client-getting-started-deleting-an-index]] === Deleting an index [source,python] diff --git a/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc index bc085f1c0b..0ac75bb872 100644 --- a/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc @@ -1,4 +1,4 @@ -[[ruby-client-getting-started]] 
+[[elasticsearch-ruby-client-getting-started]] = Get started with the serverless Ruby client :description: Set up and use the Ruby client for {es3}. @@ -11,7 +11,7 @@ client for {es3}, shows you how to initialize the client, and how to perform bas {es} operations with it. [discrete] -[[ruby-client-getting-started-requirements]] +[[elasticsearch-ruby-client-getting-started-requirements]] == Requirements * Ruby 3.0 or higher installed on your system. @@ -19,11 +19,11 @@ client for {es3}, shows you how to initialize the client, and how to perform bas * [discrete] -[[ruby-client-getting-started-installation]] +[[elasticsearch-ruby-client-getting-started-installation]] == Installation [discrete] -[[ruby-client-getting-started-from-githubs-releases]] +[[elasticsearch-ruby-client-getting-started-from-githubs-releases]] === From GitHub's releases You can install the Ruby Client from RubyGems: @@ -37,7 +37,7 @@ Check https://github.com/elastic/elasticsearch-serverless-ruby/releases[releases for the latest available versions. 
[discrete] -[[ruby-client-getting-started-from-the-source-code]] +[[elasticsearch-ruby-client-getting-started-from-the-source-code]] === From the source code You can install the Ruby client from the client's https://github.com/elastic/elasticsearch-serverless-ruby[source @@ -52,7 +52,7 @@ gem install elasticsearch-serverless-x.x.x.gem ---- [discrete] -[[ruby-client-getting-started-using-the-gemfile]] +[[elasticsearch-ruby-client-getting-started-using-the-gemfile]] === Using the Gemfile Alternatively, you can include the client gem in your Ruby project's Gemfile: @@ -70,7 +70,7 @@ require 'elasticsearch-serverless' ---- [discrete] -[[ruby-client-getting-started-running-a-ruby-console]] +[[elasticsearch-ruby-client-getting-started-running-a-ruby-console]] === Running a Ruby console You can also run the client from a Ruby console using the client's https://github.com/elastic/elasticsearch-serverless-ruby[source @@ -85,7 +85,7 @@ bundle exec rake console ---- [discrete] -[[ruby-client-getting-started-initialize-the-client]] +[[elasticsearch-ruby-client-getting-started-initialize-the-client]] == Initialize the client Initialize the client using your API key and Elasticsearch Endpoint: @@ -98,10 +98,10 @@ client = ElasticsearchServerless::Client.new( ) ---- -To get API keys or the Elasticsearch Endpoint for a project, see <>. +To get API keys or the Elasticsearch Endpoint for a project, see <>. [discrete] -[[ruby-client-getting-started-using-the-api]] +[[elasticsearch-ruby-client-getting-started-using-the-api]] == Using the API After you've initialized the client, you can start ingesting documents. You can use @@ -110,11 +110,11 @@ documents in one request. [NOTE] ==== -The code examples in this section use the Ruby console. To set up the console, <>. +The code examples in this section use the Ruby console. To set up the console, <>. 
==== [discrete] -[[ruby-client-getting-started-creating-an-index-and-ingesting-documents]] +[[elasticsearch-ruby-client-getting-started-creating-an-index-and-ingesting-documents]] === Creating an index and ingesting documents You can call the `bulk` API with a body parameter, an array of hashes that @@ -155,7 +155,7 @@ also behaves as a Hash, so you can access the body values directly as seen on the previous example with `response['items']`. [discrete] -[[ruby-client-getting-started-getting-documents]] +[[elasticsearch-ruby-client-getting-started-getting-documents]] === Getting documents You can get documents by using the following code: @@ -166,7 +166,7 @@ You can get documents by using the following code: ---- [discrete] -[[ruby-client-getting-started-searching]] +[[elasticsearch-ruby-client-getting-started-searching]] === Searching Now that some data is available, you can search your documents using the @@ -181,7 +181,7 @@ Now that some data is available, you can search your documents using the ---- [discrete] -[[ruby-client-getting-started-updating-a-document]] +[[elasticsearch-ruby-client-getting-started-updating-a-document]] === Updating a document You can call the `update` API to update a document: @@ -196,7 +196,7 @@ You can call the `update` API to update a document: ---- [discrete] -[[ruby-client-getting-started-deleting-a-document]] +[[elasticsearch-ruby-client-getting-started-deleting-a-document]] === Deleting a document You can call the `delete` API to delete a document: @@ -207,7 +207,7 @@ You can call the `delete` API to delete a document: ---- [discrete] -[[ruby-client-getting-started-deleting-an-index]] +[[elasticsearch-ruby-client-getting-started-deleting-an-index]] === Deleting an index [source,ruby] diff --git a/serverless/pages/elasticsearch/clients.asciidoc b/serverless/pages/elasticsearch/clients.asciidoc index 67f802eade..df73b01804 100644 --- a/serverless/pages/elasticsearch/clients.asciidoc +++ 
b/serverless/pages/elasticsearch/clients.asciidoc @@ -1,4 +1,4 @@ -[[clients]] +[[elasticsearch-clients]] = Client libraries :description: Index, search, and manage {es} data in your preferred language. @@ -9,10 +9,10 @@ preview:[] {es3} provides official language clients to use {es} REST APIs. Currently, the following language clients are supported: -* <> | https://github.com/elastic/elasticsearch-serverless-go[Repository] -* <> | https://github.com/elastic/elasticsearch-java/tree/main/java-client-serverless[Repository] -* <> | https://github.com/elastic/elasticsearch-net[Repository] -* <> | https://github.com/elastic/elasticsearch-serverless-js[Repository] -* <> | https://github.com/elastic/elasticsearch-serverless-php[Repository] -* <> | https://github.com/elastic/elasticsearch-serverless-python[Repository] -* <> | https://github.com/elastic/elasticsearch-serverless-ruby[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-go[Repository] +* <> | https://github.com/elastic/elasticsearch-java/tree/main/java-client-serverless[Repository] +* <> | https://github.com/elastic/elasticsearch-net[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-js[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-php[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-python[Repository] +* <> | https://github.com/elastic/elasticsearch-serverless-ruby[Repository] diff --git a/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc b/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc index 2ca9c62f7b..d7fb3d77a0 100644 --- a/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc +++ b/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc @@ -1,4 +1,4 @@ -[[dev-tools]] +[[elasticsearch-dev-tools]] = Developer tools :description: Elastic tools for developers. 
@@ -7,7 +7,7 @@ preview:[] [discrete] -[[dev-tools-developer-tools]] +[[elasticsearch-dev-tools-developer-tools]] == Developer tools A number of developer tools are available in your project's UI under the **Dev Tools** section. diff --git a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc index 998a950c53..60db48de02 100644 --- a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data-alerting]] +[[elasticsearch-explore-your-data-alerting]] = Manage alerting rules :description: Define when to generate alerts and notifications with alerting rules. @@ -34,7 +34,7 @@ For more information, go to missing link [discrete] -[[explore-your-data-alerting-snooze-and-disable-rules]] +[[elasticsearch-explore-your-data-alerting-snooze-and-disable-rules]] == Snooze and disable rules The rule listing enables you to quickly snooze, disable, enable, or delete individual rules. @@ -77,7 +77,7 @@ image::images/rule-snooze-panel.png[Snooze notifications for a rule] When a rule is in a snoozed state, you can cancel or change the duration of this state. [discrete] -[[explore-your-data-alerting-import-and-export-rules]] +[[elasticsearch-explore-your-data-alerting-import-and-export-rules]] == Import and export rules To import and export rules, use https://www.elastic.co/docs/current/serverless/saved-objects[saved objects]. @@ -95,7 +95,7 @@ Rules are disabled on export. You are prompted to re-enable the rule on successf image::images/rules-imported-banner.png[Rules import banner] [discrete] -[[explore-your-data-alerting-view-rule-details]] +[[elasticsearch-explore-your-data-alerting-view-rule-details]] == View rule details You can determine the health of a rule by looking at its **Last response**. 
diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc index 4d48a9c7ff..38d7b47353 100644 --- a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data-discover-your-data]] +[[elasticsearch-explore-your-data-discover-your-data]] = Discover your data :description: Learn how to use Discover to gain insights into your data. @@ -11,11 +11,11 @@ about the structure of the fields, and display your findings in a visualization. You can also customize and save your searches and place them on a dashboard. [discrete] -[[explore-your-data-discover-your-data-explore-and-query-your-data]] +[[elasticsearch-explore-your-data-discover-your-data-explore-and-query-your-data]] == Explore and query your data This tutorial shows you how to use **Discover** to search large amounts of -data and understand what’s going on at any given time. This tutorial uses the book sample data set from the <>. +data and understand what’s going on at any given time. This tutorial uses the book sample data set from the <>. You’ll learn to: @@ -29,7 +29,7 @@ At the end of this tutorial, you’ll be ready to start exploring with your own data in **Discover**. [discrete] -[[explore-your-data-discover-your-data-find-your-data]] +[[elasticsearch-explore-your-data-discover-your-data-find-your-data]] == Find your data Tell {kib} where to find the data you want to explore, and then specify the time range in which to view that data. @@ -76,7 +76,7 @@ image::images/book-data.png[Your book data displayed] . Click image:images/icons/plusInCircleFilled.svg[Add] to toggle the field into the document table. You can also drag the field from the **Available fields** list into the document table. 
[discrete] -[[explore-your-data-discover-your-data-add-a-field-to-your-data-source]] +[[elasticsearch-explore-your-data-discover-your-data-add-a-field-to-your-data-source]] == Add a field to your {data-source} What happens if you forgot to define an important value as a separate field? Or, what if you @@ -162,7 +162,7 @@ Save your search so you can use it later to generate a CSV report, create visual . Click **Save**. [discrete] -[[explore-your-data-discover-your-data-visualize-your-findings]] +[[elasticsearch-explore-your-data-discover-your-data-visualize-your-findings]] == Visualize your findings If a field can be {ref}/search-aggregations.html[aggregated], you can quickly visualize it from **Discover**. @@ -200,4 +200,4 @@ The **Create rule** form is pre-filled with the latest query sent to {es}. . Configure your {es} query and select a connector type. . Click **Save**. -For more about this and other rules provided in {alert-features}, go to <>. +For more about this and other rules provided in {alert-features}, go to <>. diff --git a/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc b/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc index e78c44bfcc..8449d71dfd 100644 --- a/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data-aggregations]] +[[elasticsearch-explore-your-data-aggregations]] = Aggregations :description: Aggregate and summarize your {es} data. @@ -27,7 +27,7 @@ or other criteria. other aggregations instead of documents or fields. [discrete] -[[explore-your-data-aggregations-run-an-aggregation]] +[[elasticsearch-explore-your-data-aggregations-run-an-aggregation]] == Run an aggregation You can run aggregations as part of a search by specifying the search API's `aggs` parameter. 
The @@ -86,7 +86,7 @@ Aggregation results are in the response's `aggregations` object: <1> Results for the `my-agg-name` aggregation. [discrete] -[[explore-your-data-aggregations-change-an-aggregations-scope]] +[[elasticsearch-explore-your-data-aggregations-change-an-aggregations-scope]] == Change an aggregation's scope Use the `query` parameter to limit the documents on which an aggregation runs: @@ -122,7 +122,7 @@ curl "${ES_URL}/my-index/_search?pretty" \ // TEST[s/my-field/http.request.method/] [discrete] -[[explore-your-data-aggregations-return-only-aggregation-results]] +[[elasticsearch-explore-your-data-aggregations-return-only-aggregation-results]] == Return only aggregation results By default, searches containing an aggregation return both search hits and @@ -152,7 +152,7 @@ curl "${ES_URL}/my-index/_search?pretty" \ // TEST[s/my-field/http.request.method/] [discrete] -[[explore-your-data-aggregations-run-multiple-aggregations]] +[[elasticsearch-explore-your-data-aggregations-run-multiple-aggregations]] == Run multiple aggregations You can specify multiple aggregations in the same request: @@ -187,7 +187,7 @@ curl "${ES_URL}/my-index/_search?pretty" \ // TEST[s/my-other-field/http.response.bytes/] [discrete] -[[explore-your-data-aggregations-run-sub-aggregations]] +[[elasticsearch-explore-your-data-aggregations-run-sub-aggregations]] == Run sub-aggregations Bucket aggregations support bucket or metric sub-aggregations. For example, a @@ -263,7 +263,7 @@ The response nests sub-aggregation results under their parent aggregation: <2> Results for `my-agg-name`'s sub-aggregation, `my-sub-agg-name`. 
[discrete] -[[explore-your-data-aggregations-add-custom-metadata]] +[[elasticsearch-explore-your-data-aggregations-add-custom-metadata]] == Add custom metadata Use the `meta` object to associate custom metadata with an aggregation: @@ -315,7 +315,7 @@ The response returns the `meta` object in place: // TESTRESPONSE[s/\.\.\./"took": "$body.took", "timed_out": false, "_shards": "$body._shards", "hits": "$body.hits",/] [discrete] -[[explore-your-data-aggregations-return-the-aggregation-type]] +[[elasticsearch-explore-your-data-aggregations-return-the-aggregation-type]] == Return the aggregation type By default, aggregation results include the aggregation's name but not its type. @@ -377,7 +377,7 @@ the aggregated field. <1> The aggregation type, `histogram`, followed by a `#` separator and the aggregation's name, `my-agg-name`. [discrete] -[[explore-your-data-aggregations-use-scripts-in-an-aggregation]] +[[elasticsearch-explore-your-data-aggregations-use-scripts-in-an-aggregation]] == Use scripts in an aggregation When a field doesn't exactly match the aggregation you need, you @@ -417,13 +417,13 @@ some of their optimizations with runtime fields. In total, performance costs for using a runtime field varies from aggregation to aggregation. [discrete] -[[explore-your-data-aggregations-aggregation-caches]] +[[elasticsearch-explore-your-data-aggregations-aggregation-caches]] == Aggregation caches For faster responses, {es} caches the results of frequently run aggregations in the {ref}/shard-request-cache.html[shard request cache]. To get cached results, use the same {ref}/search-shard-routing.html#shard-and-node-preference[`preference` string] for each search. If you -don't need search hits, <> to avoid +don't need search hits, <> to avoid filling the cache. {es} routes searches with the same preference string to the same shards. If the @@ -431,7 +431,7 @@ shards' data doesn't change between searches, the shards return cached aggregation results. 
[discrete] -[[explore-your-data-aggregations-limits-for-long-values]] +[[elasticsearch-explore-your-data-aggregations-limits-for-long-values]] == Limits for `long` values When running aggregations, {es} uses {ref}/number.html[`double`] values to hold and diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc index b2b8dcdafb..6c3f7e5155 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc @@ -1,4 +1,4 @@ -[role="exclude",id="explore-your-data-dashboards"] +[role="exclude",id="elasticsearch-explore-your-data-dashboards"] = Create dashboards :description: Create dashboards to visualize and monitor your {es} data. @@ -25,7 +25,7 @@ When you create a dashboard, you are automatically in edit mode and can make cha When you open an existing dashboard, you are in view mode. To make changes, click **Edit** in the toolbar. [discrete] -[[explore-your-data-dashboards-add-data-and-create-a-dashboard]] +[[elasticsearch-explore-your-data-dashboards-add-data-and-create-a-dashboard]] == Add data and create a dashboard Add the sample web logs data, and create and set up the dashboard. @@ -39,13 +39,13 @@ Create the dashboard where you'll display the visualization panels. . Open the main menu, then click **Dashboard**. . Click **[Logs] Web Traffic**. -By default some visualization panels have been created for you using the sample data. Go to <> to learn about the different visualizations. +By default some visualization panels have been created for you using the sample data. Go to <> to learn about the different visualizations. 
[role="screenshot"] image::images/dashboard-example.png[dashboard with default visualizations using sample data] [discrete] -[[explore-your-data-dashboards-reset-the-dashboard]] +[[elasticsearch-explore-your-data-dashboards-reset-the-dashboard]] == Reset the dashboard To remove any changes you've made, reset the dashboard to the last saved changes. @@ -54,7 +54,7 @@ To remove any changes you've made, reset the dashboard to the last saved changes . Click **Reset dashboard**. [discrete] -[[explore-your-data-dashboards-save-dashboards]] +[[elasticsearch-explore-your-data-dashboards-save-dashboards]] == Save dashboards When you've finished making changes to the dashboard, save it. @@ -63,7 +63,7 @@ When you've finished making changes to the dashboard, save it. . To exit **Edit** mode, click **Switch to view mode**. [discrete] -[[explore-your-data-dashboards-add-dashboard-settings]] +[[elasticsearch-explore-your-data-dashboards-add-dashboard-settings]] == Add dashboard settings When creating a new dashboard you can add the title, tags, design options, and more to the dashboard. @@ -83,13 +83,13 @@ When creating a new dashboard you can add the title, tags, design options, and m . Click **Apply**. [discrete] -[[explore-your-data-dashboards-share-dashboards]] +[[elasticsearch-explore-your-data-dashboards-share-dashboards]] == Share dashboards To share the dashboard with a larger audience, click **Share** in the toolbar. For detailed information about the sharing options, refer to {kibana-ref}/reporting-getting-started.html[Reporting]. [discrete] -[[explore-your-data-dashboards-export-dashboards]] +[[elasticsearch-explore-your-data-dashboards-export-dashboards]] == Export dashboards To automate {kib}, you can export dashboards as JSON using the {kibana-ref}/saved-objects-api-export.html[Export objects API]. It is important to export dashboards with all necessary references. 
diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc index cc0f7683c6..d245593406 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc @@ -1,4 +1,4 @@ -[role="exclude",id="explore-your-data-visualizations"] +[role="exclude",id="elasticsearch-explore-your-data-visualizations"] = Create visualizations :description: Create charts, graphs, maps, and more from your {es} data. @@ -7,10 +7,10 @@ preview:[] Learn how to create some visualization panels to add to your dashboard. -This tutorial uses the same web logs sample data from <>. +This tutorial uses the same web logs sample data from <>. [discrete] -[[explore-your-data-visualizations-open-the-visualization-editor-and-get-familiar-with-the-data]] +[[elasticsearch-explore-your-data-visualizations-open-the-visualization-editor-and-get-familiar-with-the-data]] == Open the visualization editor and get familiar with the data Once you have loaded the web logs sample data into your dashboard lets open the visualization editor, to ensure the correct fields appear. @@ -29,7 +29,7 @@ To create the visualizations in this tutorial, you'll use the following fields: To see the most frequent values in a field, hover over the field name, then click _i_. [discrete] -[[explore-your-data-visualizations-create-your-first-visualization]] +[[elasticsearch-explore-your-data-visualizations-create-your-first-visualization]] == Create your first visualization Pick a field you want to analyze, such as **clientip**. To analyze only the **clientip** field, use the **Metric** visualization to display the field as a number. @@ -50,7 +50,7 @@ b. Click **Close**. **[No Title]** appears in the visualization panel header. 
Since the visualization has its own `Unique visitors` label, you do not need to add a panel title. [discrete] -[[explore-your-data-visualizations-view-a-metric-over-time]] +[[elasticsearch-explore-your-data-visualizations-view-a-metric-over-time]] == View a metric over time There are two shortcuts you can use to view metrics over time. @@ -90,7 +90,7 @@ Since you removed the axis labels, add a panel title: . In the **Title** field, enter `Median of bytes`, then click **Apply**. [discrete] -[[explore-your-data-visualizations-view-the-top-values-of-a-field]] +[[elasticsearch-explore-your-data-visualizations-view-the-top-values-of-a-field]] == View the top values of a field Create a visualization that displays the most frequent values of **request.keyword** on your website, ranked by the unique visitors. To create the visualization, use **Top values of request.keyword** ranked by **Unique count of clientip**, instead of being ranked by **Count of records**. @@ -121,7 +121,7 @@ c. Click **Close**. Since the table columns are labeled, you do not need to add a panel title. [discrete] -[[explore-your-data-visualizations-compare-a-subset-of-documents-to-all-documents]] +[[elasticsearch-explore-your-data-visualizations-compare-a-subset-of-documents-to-all-documents]] == Compare a subset of documents to all documents Create a proportional visualization that helps you determine if your users transfer more bytes from documents under 10KB versus documents over 10KB. @@ -161,7 +161,7 @@ Add a panel title: . In the **Title** field, enter `Sum of bytes from large requests`, then click **Apply**. [discrete] -[[explore-your-data-visualizations-view-the-distribution-of-a-number-field]] +[[elasticsearch-explore-your-data-visualizations-view-the-distribution-of-a-number-field]] == View the distribution of a number field The distribution of a number can help you find patterns. For example, you can analyze the website traffic per hour to find the best time for routine maintenance. 
@@ -185,7 +185,7 @@ Add a panel title: . In the **Title** field, enter `Website traffic`, then click **Apply**. [discrete] -[[explore-your-data-visualizations-create-a-multi-level-chart]] +[[elasticsearch-explore-your-data-visualizations-create-a-multi-level-chart]] == Create a multi-level chart **Table** and **Proportion** visualizations support multiple functions. For example, to create visualizations that break down the data by website traffic sources and user geography, apply the **Filters** and **Top values** functions. @@ -232,13 +232,13 @@ Add a panel title: . In the **Title** field, enter `Page views by location and referrer`, then click **Apply**. [discrete] -[[explore-your-data-visualizations-visualization-panels]] +[[elasticsearch-explore-your-data-visualizations-visualization-panels]] == Visualization panels Visualization panels are how you display visualizations of your data and what make Kibana such a useful tool. Panels are designed to build interactive dashboards. [discrete] -[[explore-your-data-visualizations-create-and-add-panels]] +[[elasticsearch-explore-your-data-visualizations-create-and-add-panels]] === Create and add panels Create new panels, which can be accessed from the dashboard toolbar or the **Visualize Library**, or add panels that are saved in the **Visualize Library**, or search results from <>. @@ -268,13 +268,13 @@ To add existing panels from the **Visualize Library**: . Click the panel you want to add to the dashboard, then click _X_. [discrete] -[[explore-your-data-visualizations-save-panels]] +[[elasticsearch-explore-your-data-visualizations-save-panels]] === Save panels Consider where you want to save and add the panel in {kib}. [discrete] -[[explore-your-data-visualizations-save-to-the-visualize-library]] +[[elasticsearch-explore-your-data-visualizations-save-to-the-visualize-library]] ==== Save to the Visualize Library To use the panel on other dashboards, save the panel to the **Visualize Library**. 
When panels are saved in the **Visualize Library**, image:images/icons/folderCheck.svg[Visualize Library] appears in the panel header. @@ -292,7 +292,7 @@ If you created the panel from the **Visualize Library**: . Click **Save**. [discrete] -[[explore-your-data-visualizations-save-to-the-dashboard]] +[[elasticsearch-explore-your-data-visualizations-save-to-the-dashboard]] ==== Save to the dashboard Return to the dashboard and add the panel without specifying the save options or adding the panel to the **Visualize Library**. @@ -315,7 +315,7 @@ To add unsaved panels to the **Visualize Library**: . Enter the panel title, then click **Save**. [discrete] -[[explore-your-data-visualizations-arrange-panels]] +[[elasticsearch-explore-your-data-visualizations-arrange-panels]] === Arrange panels Compare the data in your panels side-by-side, organize panels by priority, resize the panels so they all appear on the dashboard without scrolling down, and more. @@ -327,7 +327,7 @@ In the toolbar, click **Edit**, then use the following options: * To maximize to fullscreen, open the panel menu, then click **More → Maximize panel**. [discrete] -[[explore-your-data-visualizations-add-text-panels]] +[[elasticsearch-explore-your-data-visualizations-add-text-panels]] === Add text panels Add **Text** panels to your dashboard that display important information, instructions, and more. You create **Text** panels using https://github.github.com/gfm/[GitHub-flavored Markdown] text. @@ -338,7 +338,7 @@ Add **Text** panels to your dashboard that display important information, instru . To save the new text panel to your dashboard click **Save**. [discrete] -[[explore-your-data-visualizations-add-image-panels]] +[[elasticsearch-explore-your-data-visualizations-add-image-panels]] === Add image panels To personalize your dashboards, add your own logos and graphics with the **Image** panel. You can upload images from your computer, or add images from an external link. 
diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc index 67548ab641..8c8ae1b5ed 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data-visualize-your-data]] +[[elasticsearch-explore-your-data-visualize-your-data]] = Visualize your data :description: Build dynamic dashboards and visualizations for your {es} data. @@ -22,7 +22,7 @@ Notice you can filter the list of dashboards: * Click a dashboard's tags to toggle filtering for each tag. [discrete] -[[explore-your-data-visualize-your-data-create-new-dashboards]] +[[elasticsearch-explore-your-data-visualize-your-data-create-new-dashboards]] == Create new dashboards To create a new dashboard, click **Create dashboard** and begin adding visualizations. diff --git a/serverless/pages/elasticsearch/explore-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data.asciidoc index 5c869f2fd3..aea1310171 100644 --- a/serverless/pages/elasticsearch/explore-your-data.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data.asciidoc @@ -1,4 +1,4 @@ -[[explore-your-data]] +[[elasticsearch-explore-your-data]] = Explore your data :description: Turn {es} data into actionable insights with aggregations, visualizations, and alerts @@ -8,7 +8,7 @@ preview:[] In addition to search, {es3} offers several options for analyzing and visualizing your data. -* <>: Use the {es} REST API to summarize your data as metrics, statistics, or other analytics. -* <>: Use the **Discover** UI to filter your data or learn about its structure. -* <>: Build dynamic dashboards that visualize your data as charts, gauges, graphs, maps, and more. -* <>: Create rules that trigger notifications based on your data. 
+* <>: Use the {es} REST API to summarize your data as metrics, statistics, or other analytics. +* <>: Use the **Discover** UI to filter your data or learn about its structure. +* <>: Build dynamic dashboards that visualize your data as charts, gauges, graphs, maps, and more. +* <>: Create rules that trigger notifications based on your data. diff --git a/serverless/pages/elasticsearch/get-started.asciidoc b/serverless/pages/elasticsearch/get-started.asciidoc index 9e55c8640a..2dded24f26 100644 --- a/serverless/pages/elasticsearch/get-started.asciidoc +++ b/serverless/pages/elasticsearch/get-started.asciidoc @@ -1,4 +1,4 @@ -[[get-started]] +[[elasticsearch-get-started]] = Get started :description: Get started with {es3} in a few steps @@ -10,13 +10,13 @@ Follow along to set up your {es} project and get started with some sample docume Then, choose how to continue with your own data. [discrete] -[[get-started-create-project]] +[[elasticsearch-get-started-create-project]] == Create project Use your {ecloud} account to create a fully-managed {es} project: . Navigate to {ess-console}[cloud.elastic.co] and create a new account or log in to your existing account. -. Within **Fully-managed projects**, choose **Create project**. +. Within **Serverless Projects**, choose **Create project**. . Choose the {es} project type. . Select a **configuration** for your project, based on your use case. + @@ -31,19 +31,20 @@ You should now see **Get started with {es}**, and you're ready to continue. include::../../partials/minimum-vcus-detail.asciidoc[] [discrete] -[[get-started-create-api-key]] +[[elasticsearch-get-started-create-api-key]] == Create API key Create an API key, which will enable you to access the {es} API to ingest and search data. . Scroll to **Add an API Key** and select **New**. -. In **Create an API key**, enter a name for your key and its expiration. -Select **Create API Key** to finish. +. 
In **Create API Key**, enter a name for your key and (optionally) set an expiration date. +. (Optional) Under **Control Security privileges**, you can set specific access permissions for this API key. By default, it has full access to all APIs. +. (Optional) The **Add metadata** section allows you to add custom key-value pairs to help identify and organize your API keys. +. Select **Create API Key** to finish. -The API key is displayed as a set of values, including `id`, `name`, `expiration`, `api_key`, and `encoded`. -Store this information securely—it is displayed only once. - -You will use the `encoded` value when sending API requests. +After creation, you'll see your API key displayed as an encoded string. +Store this encoded API key securely. It is displayed only once and cannot be retrieved later. +You will use this encoded API key when sending API requests. [NOTE] ==== @@ -51,7 +52,7 @@ You can't recover or retrieve a lost API key. Instead, you must delete the key a ==== [discrete] -[[get-started-copy-url]] +[[elasticsearch-get-started-copy-url]] == Copy URL Next, copy the URL of your API endpoint. @@ -64,7 +65,7 @@ Store this value along with your `encoded` API key. You'll use both values in the next step. [discrete] -[[get-started-test-connection]] +[[elasticsearch-get-started-test-connection]] == Test connection We'll use the `curl` command to test your connection and make additional API requests. @@ -106,12 +107,12 @@ You should receive a response similar to the following: Now you're ready to ingest and search some sample documents. [discrete] -[[get-started-ingest-data]] +[[elasticsearch-get-started-ingest-data]] == Ingest data [NOTE] ==== -This example uses {es} APIs to ingest data. If you'd prefer to upload a file using the UI, refer to <>. +This example uses {es} APIs to ingest data. If you'd prefer to upload a file using the UI, refer to <>. ==== To ingest data, you must create an index and store some documents. 
@@ -158,7 +159,7 @@ You should receive a response indicating there were no errors: ---- [discrete] -[[get-started-search-data]] +[[elasticsearch-get-started-search-data]] == Search data To search, send a `POST` request to the `_search` endpoint, specifying the index to search. @@ -219,7 +220,7 @@ You should receive a response with the results: ---- [discrete] -[[get-started-continue-on-your-own]] +[[elasticsearch-get-started-continue-on-your-own]] == Continue on your own Congratulations! @@ -227,20 +228,20 @@ You've set up an {es} project, and you've ingested and searched some sample data Now you're ready to continue on your own. [discrete] -[[get-started-explore]] +[[elasticsearch-get-started-explore]] === Explore Want to explore the sample documents or your own data? -By creating a data view, you can explore data using several UI tools, such as Discover or Dashboards. Or, use {es} aggregations to explore your data using the API. Find more information in <>. +By creating a data view, you can explore data using several UI tools, such as Discover or Dashboards. Or, use {es} aggregations to explore your data using the API. Find more information in <>. [discrete] -[[get-started-build]] +[[elasticsearch-get-started-build]] === Build Ready to build your own solution? -To learn more about sending and syncing data to {es}, or the search API and its query DSL, check <> and <>. +To learn more about sending and syncing data to {es}, or the search API and its query DSL, check <> and <>. 
//// /* diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc index 94f891d904..4844b3abb9 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc @@ -1,4 +1,4 @@ -[[ingest-data-through-api]] +[[elasticsearch-ingest-data-through-api]] = Ingest data through API :description: Add data to {es} using HTTP APIs or a language client. @@ -8,21 +8,21 @@ preview:[] The {es} APIs enable you to ingest data through code. You can use the APIs of one of the -<> or the +<> or the {es} HTTP APIs. The examples on this page use the HTTP APIs to demonstrate how ingesting works in {es} through APIs. If you want to ingest timestamped data or have a more complex ingestion use case, check out -<> or -<>. +<> or +<>. // . // ^^^^Page temporarily removed [discrete] -[[ingest-data-through-api-using-the-bulk-api]] +[[elasticsearch-ingest-data-through-api-using-the-bulk-api]] == Using the bulk API You can index multiple JSON documents to an index and make it searchable using @@ -115,7 +115,7 @@ matches the query. The response contains the whole document. Only one document matches this query. [discrete] -[[ingest-data-through-api-using-the-index-api]] +[[elasticsearch-ingest-data-through-api-using-the-index-api]] == Using the index API Use the index API to ingest a single document to an index. 
Following the diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc index 8f66b40bdd..42d05a7a6d 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc @@ -1,4 +1,4 @@ -[[ingest-data-through-beats]] +[[elasticsearch-ingest-data-through-beats]] = Beats :description: Use {beats} to ship operational data to {es}. @@ -42,7 +42,7 @@ can further process and enhance the data before visualizing it in {kib}. [NOTE] ==== When you use {beats} to export data to an {es} project, the {beats} require an API key to authenticate with {es}. -Refer to <> for the steps to set up your API key, +Refer to <> for the steps to set up your API key, and to https://www.elastic.co/guide/en/beats/filebeat/current/beats-api-keys.html[Grant access using API keys] in the Filebeat documentation for an example of how to configure your {beats} to use the key. ==== diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc index 20b689d7fb..af38aa4d24 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc @@ -1,26 +1,29 @@ -[[ingest-data-through-integrations-connector-client]] +[[elasticsearch-ingest-data-through-integrations-connector-client]] = Connector clients +:description: Set up and deploy self-managed connectors that run on your own infrastructure. 
+:keywords: serverless, elasticsearch, ingest, connector, how to + [NOTE] ==== This page contains high-level instructions about setting up connector clients in your project's UI. Because prerequisites and configuration details vary by data source, you'll need to refer to the individual connector documentation for specific details. ==== -A *connector* is a type of https://www.elastic.co/integrations/data-integrations[Elastic integration] that syncs data from an original data source to {es}. +A _connector_ is a type of https://www.elastic.co/integrations/data-integrations[Elastic integration] that syncs data from an original data source to {es}. Each connector extracts the original files, records, or objects; and transforms them into documents within {es}. -*Connector clients* are **self-managed** connectors that you run on your own infrastructure. +_Connector clients_ are **self-managed** connectors that you run on your own infrastructure. These connectors are written in Python and the source code is available in the https://github.com/elastic/connectors/tree/main/connectors/sources[`elastic/connectors`] repo. [discrete] -[[ingest-data-through-integrations-connector-client-available-connectors]] +[[elasticsearch-ingest-data-through-integrations-connector-client-available-connectors]] == Available connectors Connector clients are available for the following third-party data sources: -[%collapsible] .Click to expand +[%collapsible] ===== // TODO: Update links if these references move @@ -56,7 +59,7 @@ Connector clients are available for the following third-party data sources: ===== [discrete] -[[ingest-data-through-integrations-connector-client-overview]] +[[elasticsearch-ingest-data-through-integrations-connector-client-overview]] == Overview Because connector clients are self-managed on your own infrastructure, they run outside of your {es} serverless project. 
@@ -76,18 +79,18 @@ At a high-level, the workflow looks like this: ==== [discrete] -[[ingest-data-through-integrations-connector-client-data-source-prerequisites]] +[[elasticsearch-ingest-data-through-integrations-connector-client-data-source-prerequisites]] === Data source prerequisites The first decision you need to make before deploying a connector is which third party service (data source) you want to sync to {es}. -See the list of <>. +See the list of <>. Note that each data source will have specific prerequisites you'll need to meet to authorize the connector to access its data. For example, certain data sources may require you to create an OAuth application, or create a service account. -You'll need to check the <> for these details. +You'll need to check the <> for these details. [discrete] -[[ingest-data-through-integrations-connector-client-step-1-initial-setup-in-ui]] +[[elasticsearch-ingest-data-through-integrations-connector-client-step-1-initial-setup-in-ui]] == Step 1: Initial setup in UI In your project's UI, go to **{es} → Connectors**. @@ -102,7 +105,7 @@ You'll need to update these values in your https://github.com/elastic/connectors . Run the connector code either from source or with Docker, following the instructions below. [discrete] -[[ingest-data-through-integrations-connector-client-step-2-deploy-your-self-managed-connector]] +[[elasticsearch-ingest-data-through-integrations-connector-client-step-2-deploy-your-self-managed-connector]] == Step 2: Deploy your self-managed connector To use connector clients, you must deploy the connector service so your connector can talk to your {es} instance. @@ -110,8 +113,8 @@ The source code is hosted in the `elastic/connectors` repository. 
You have two deployment options: -* Run with <> (recommended) -* Run from <> +* Run with <> (recommended) +* Run from <> [NOTE] ==== @@ -124,7 +127,7 @@ You'll need the following values handy to update your `config.yml` file: ==== [discrete] -[[ingest-data-through-integrations-connector-client-run-with-docker]] +[[elasticsearch-ingest-data-through-integrations-connector-client-run-with-docker]] === Run with Docker You can deploy connector clients using Docker. @@ -187,7 +190,7 @@ Each individual connector client reference contains instructions for deploying sp ==== [discrete] -[[ingest-data-through-integrations-connector-client-run-from-source]] +[[elasticsearch-ingest-data-through-integrations-connector-client-run-from-source]] === Run from source Running from source requires cloning the repository and running the code locally. @@ -248,7 +251,7 @@ The connector service should now be running in your terminal. If the connection Here we're working locally. In a production setup, you'll deploy the connector service to your own infrastructure. [discrete] -[[ingest-data-through-integrations-connector-client-step-3-enter-data-source-details-in-ui]] +[[elasticsearch-ingest-data-through-integrations-connector-client-step-3-enter-data-source-details-in-ui]] == Step 3: Enter data source details in UI Once the connector service is running, it's time to head back to the UI to finalize the connector configuration. @@ -266,7 +269,7 @@ For example, the Sharepoint Online connector requires the following details about * **Comma-separated list of tables** [discrete] -[[ingest-data-through-integrations-connector-client-step-4-connect-to-an-index]] +[[elasticsearch-ingest-data-through-integrations-connector-client-step-4-connect-to-an-index]] == Step 4: Connect to an index Once you've entered the data source details, you need to connect to an index.
@@ -293,7 +296,7 @@ When a sync is launched you'll start to see documents being added to your {es} i Learn https://github.com/elastic/connectors/blob/main/docs/DEVELOPING.md#syncing[how syncing works] in the `elastic/connectors` repo docs. [discrete] -[[ingest-data-through-integrations-connector-client-learn-more]] +[[elasticsearch-ingest-data-through-integrations-connector-client-learn-more]] == Learn more * Read the main https://www.elastic.co/guide/en/elasticsearch/reference/master/es-connectors.html[Elastic connectors documentation] diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx index b70440ba02..7f958ddeba 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx @@ -2,12 +2,12 @@ slug: /serverless/elasticsearch/ingest-data-through-integrations-connector-client title: Connector clients description: Set up and deploy self-managed connectors that run on your own infrastructure. -tags: [ 'serverless', 'elasticsearch', 'ingest', 'connector', how to' ] +tags: [ 'serverless', 'elasticsearch', 'ingest', 'connector', 'how to' ] status: in review --- - This page contains high-level instructions about setting up connector clients in your project's UI. + This page contains high-level instructions about setting up connector clients in your project's UI. Because prerequisites and configuration details vary by data source, you'll need to refer to the individual connector documentation for specific details. 
@@ -94,7 +94,7 @@ You'll need to update these values in your [`config.yml`](https://github.com/ela ## Step 2: Deploy your self-managed connector -To use connector clients, you must deploy the connector service so your connector can talk to your ((es)) instance. +To use connector clients, you must deploy the connector service so your connector can talk to your ((es)) instance. The source code is hosted in the `elastic/connectors` repository. You have two deployment options: @@ -168,7 +168,7 @@ Find all available Docker images in the [official Elastic Docker registry](https ### Run from source -Running from source requires cloning the repository and running the code locally. +Running from source requires cloning the repository and running the code locally. Use this approach if you're actively customizing connectors. Follow these steps: diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc index 0a5ca16e3a..16b57917ab 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc @@ -1,4 +1,4 @@ -[[ingest-data-through-logstash]] +[[elasticsearch-ingest-data-through-logstash]] = Logstash :description: Use {ls} to ship data to {es}. @@ -12,7 +12,7 @@ It supports a wide variety of data sources, and can dynamically unify data from {ls} can collect data using a variety of {ls} {logstash-ref}/input-plugins.html[input plugins], enrich and transform the data with {ls} {logstash-ref}/filter-plugins.html[filter plugins], and output the data to {es} using the {ls} {logstash-ref}/plugins-outputs-elasticsearch.html[Elasticsearch output plugin]. 
-You can use {ls} to extend <> for advanced use cases, +You can use {ls} to extend <> for advanced use cases, such as data routed to multiple destinations or when you need to make your data persistent. .Logstash for Elasticsearch on serverless @@ -37,7 +37,7 @@ Set the value to port `:443` instead. ==== [discrete] -[[ingest-data-through-logstash-requirements]] +[[elasticsearch-ingest-data-through-logstash-requirements]] == Requirements To use {ls} to send data to {es3}, you must be using: @@ -48,7 +48,7 @@ To use {ls} to send data to {es3}, you must be using: * {ls} {logstash-ref}/plugins-filters-elasticsearch.html[{es} filter plugin] 3.16.0 or later [discrete] -[[ingest-data-through-logstash-secure-connection]] +[[elasticsearch-ingest-data-through-logstash-secure-connection]] == Secure connection Serverless Elasticsearch simplifies secure communication between {ls} and {es}. @@ -58,7 +58,7 @@ Configure the {logstash-ref}/plugins-outputs-elasticsearch.html[Elasticsearch ou No additional SSL configuration steps are needed. [discrete] -[[ingest-data-through-logstash-api-keys-for-connecting-ls-to-es3]] +[[elasticsearch-ingest-data-through-logstash-api-keys-for-connecting-ls-to-es3]] == API keys for connecting {ls} to {es3} Use the **Security: API key** section in the UI to https://www.elastic.co/docs/current/serverless/api-keys[create an API key] @@ -82,7 +82,7 @@ output { ---- [discrete] -[[ingest-data-through-logstash-migrating-elasticsearch-data-using-ls]] +[[elasticsearch-ingest-data-through-logstash-migrating-elasticsearch-data-using-ls]] == Migrating Elasticsearch data using {ls} You can use {ls} to migrate data from self-managed {es} or {ess} to {es3}, or to migrate data from one {es3} deployment to another. 
@@ -91,10 +91,10 @@ Create a {logstash-ref}/configuration.html[{ls} pipeline] that includes the {es} Configure the {es} input to point to your source deployment or instance, and configure the {es} output with the `cloud_id` and `api_key` settings for your target {es3} instance. -If your origin index is using <>, then you might need to adjust your index settings. +If your origin index is using <>, then you might need to adjust your index settings. [discrete] -[[ingest-data-through-logstash-next-steps]] +[[elasticsearch-ingest-data-through-logstash-next-steps]] == Next steps Check out the https://www.elastic.co/logstash[Logstash product page] to see what {ls} can do for you. diff --git a/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc index 54487942a1..2f9daab4a6 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc @@ -1,4 +1,4 @@ -[[ingest-data-file-upload]] +[[elasticsearch-ingest-data-file-upload]] = Upload a file :description: Add data to {es} using the File Uploader. @@ -27,7 +27,7 @@ File formats supported up to 60 MB: * Open Document Format (ODF) [discrete] -[[ingest-data-file-upload-how-to-upload-a-file]] +[[elasticsearch-ingest-data-file-upload-how-to-upload-a-file]] == How to upload a file You'll find a link to the File Uploader on the {es} **Home** page. diff --git a/serverless/pages/elasticsearch/ingest-your-data.asciidoc b/serverless/pages/elasticsearch/ingest-your-data.asciidoc index 91fd190989..585c9d57a9 100644 --- a/serverless/pages/elasticsearch/ingest-your-data.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data.asciidoc @@ -1,4 +1,4 @@ -[[ingest-your-data]] +[[elasticsearch-ingest-your-data]] = Ingest your data :description: Add data to your {es} project. 
@@ -8,11 +8,11 @@ preview:[] You have many options for ingesting, or indexing, data into {es}: -* <> -* <> -* <> -* <> -* <> +* <> +* <> +* <> +* <> +* <> The best ingest option(s) for your use case depends on whether you are indexing general content or time series (timestamped) data. diff --git a/serverless/pages/elasticsearch/knn-search.asciidoc b/serverless/pages/elasticsearch/knn-search.asciidoc index 97ed5156b6..acc5e00ebe 100644 --- a/serverless/pages/elasticsearch/knn-search.asciidoc +++ b/serverless/pages/elasticsearch/knn-search.asciidoc @@ -1,4 +1,4 @@ -[[knn-search]] +[[elasticsearch-knn-search]] = k-nearest neighbor (kNN) search :description: Vector search with k-nearest neighbor (kNN). @@ -16,7 +16,7 @@ Common use cases for kNN include: * Similarity search for images or videos [discrete] -[[knn-search-prerequisites]] +[[elasticsearch-knn-search-prerequisites]] == Prerequisites * To run a kNN search, you must be able to convert your data into meaningful @@ -36,7 +36,7 @@ based on a similarity metric, the better its match. ** `read` to search the index [discrete] -[[knn-search-knn-methods]] +[[elasticsearch-knn-search-knn-methods]] == kNN methods {es} supports two methods for kNN search: @@ -58,7 +58,7 @@ filter your data to a small subset of documents, you can get good search performance using this approach. [discrete] -[[knn-search-approximate-knn]] +[[elasticsearch-knn-search-approximate-knn]] == Approximate kNN To run an approximate kNN search, use the {ref}/knn-search.html#approximate-knn[`knn` option] @@ -162,7 +162,7 @@ the similarity between the query and document vector. See search scores are computed. 
[discrete] -[[knn-search-tune-approximate-knn-for-speed-or-accuracy]] +[[elasticsearch-knn-search-tune-approximate-knn-for-speed-or-accuracy]] === Tune approximate kNN for speed or accuracy To gather results, the kNN search API finds a `num_candidates` number of @@ -268,7 +268,7 @@ curl -X POST "${ES_URL}/byte-image-index/_search" \ // TEST[s/"num_candidates": 100/"num_candidates": 3/] [discrete] -[[knn-search-filtered-knn-search]] +[[elasticsearch-knn-search-filtered-knn-search]] === Filtered kNN search The kNN search API supports restricting the search using a filter. The search @@ -313,7 +313,7 @@ returns fewer than k results, even when there are enough matching documents. ==== [discrete] -[[knn-search-combine-approximate-knn-with-other-features]] +[[elasticsearch-knn-search-combine-approximate-knn-with-other-features]] === Combine approximate kNN with other features You can perform 'hybrid retrieval' by providing both the @@ -361,14 +361,14 @@ each score in the sum. In the example above, the scores will be calculated as score = 0.9 * match_score + 0.1 * knn_score ---- -The `knn` option can also be used with <>. +The `knn` option can also be used with <>. In general, {es} computes aggregations over all documents that match the search. So for approximate kNN search, aggregations are calculated on the top `k` nearest documents. If the search also includes a `query`, then aggregations are calculated on the combined set of `knn` and `query` matches. [discrete] -[[knn-search-perform-semantic-search]] +[[elasticsearch-knn-search-perform-semantic-search]] === Perform semantic search kNN search enables you to perform semantic search by using a previously deployed @@ -431,7 +431,7 @@ embeddings, refer to this {ml-docs}/ml-nlp-text-emb-vector-search-example.html[end-to-end example]. 
[discrete] -[[knn-search-search-multiple-knn-fields]] +[[elasticsearch-knn-search-search-multiple-knn-fields]] === Search multiple kNN fields In addition to 'hybrid retrieval', you can search more than one kNN vector field at a time: @@ -486,7 +486,7 @@ score = 0.9 * match_score + 0.1 * knn_score_image-vector + 0.5 * knn_score_title ---- [discrete] -[[knn-search-search-knn-with-expected-similarity]] +[[elasticsearch-knn-search-search-knn-with-expected-similarity]] === Search kNN with expected similarity While kNN is a powerful tool, it always tries to return `k` nearest neighbors. Consequently, when using `knn` with @@ -973,7 +973,7 @@ curl -X PUT "${ES_URL}/image-index" \ ---- [discrete] -[[knn-search-limitations-for-approximate-knn-search]] +[[elasticsearch-knn-search-limitations-for-approximate-knn-search]] === Limitations for approximate kNN search {es} uses the https://arxiv.org/abs/1603.09320[HNSW algorithm] to support diff --git a/serverless/pages/elasticsearch/search-playground.asciidoc b/serverless/pages/elasticsearch/search-playground.asciidoc index f7ef5a69df..b93241827d 100644 --- a/serverless/pages/elasticsearch/search-playground.asciidoc +++ b/serverless/pages/elasticsearch/search-playground.asciidoc @@ -1,4 +1,4 @@ -[[playground]] +[[elasticsearch-playground]] = Playground :description: Test and edit Elasticsearch queries and chat with your data using LLMs. diff --git a/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc b/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc index af56be645c..2b04c4a460 100644 --- a/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc +++ b/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc @@ -1,4 +1,4 @@ -[[search-your-data-the-search-api]] +[[elasticsearch-search-your-data-the-search-api]] = The search API :description: Run queries and aggregations with the search API. 
diff --git a/serverless/pages/elasticsearch/search-your-data.asciidoc b/serverless/pages/elasticsearch/search-your-data.asciidoc index ad422f99cd..7e873a6f09 100644 --- a/serverless/pages/elasticsearch/search-your-data.asciidoc +++ b/serverless/pages/elasticsearch/search-your-data.asciidoc @@ -1,4 +1,4 @@ -[[search-your-data]] +[[elasticsearch-search-your-data]] = Search your data :description: Use the search API to run queries on your data. @@ -14,7 +14,7 @@ You can think of a query as a question, written in a way {es} understands. Depen * What users on my network ran regsvr32.exe within the last week? * What pages on my website contain a specific word or phrase? -You run search queries using the <>. The API supports several query types and search methods: +You run search queries using the <>. The API supports several query types and search methods: **Search for exact values.** Use {ref}/term-level-queries.html[term-level queries] to filter numbers, dates, IPs, or strings based on exact values or ranges. @@ -23,6 +23,6 @@ Use {ref}/term-level-queries.html[term-level queries] to filter numbers, dates, Use {ref}/full-text-queries.html[full-text queries] to query {ref}/analysis.html#analysis[unstructured text] and find documents that best match query terms. Use <> to search for words or phrases that have the same or similar meaning. **Vector search.** -Store dense vectors in {es} and use <> to find similar vectors. +Store dense vectors in {es} and use <> to find similar vectors. You can also use Elastic's natural language processing (NLP) model to encode text as sparse or dense vectors. Then use <> to find data based on the intent and contextual meaning rather than matching keywords. 
diff --git a/serverless/pages/elasticsearch/serverless-differences.asciidoc b/serverless/pages/elasticsearch/serverless-differences.asciidoc index 7161d8d627..010425ceab 100644 --- a/serverless/pages/elasticsearch/serverless-differences.asciidoc +++ b/serverless/pages/elasticsearch/serverless-differences.asciidoc @@ -1,4 +1,4 @@ -[[differences]] +[[elasticsearch-differences]] = Differences from other Elasticsearch offerings :description: Understand how serverless Elasticsearch differs from Elastic Cloud Hosted and self-managed offerings. @@ -22,13 +22,13 @@ complexity by optimizing your cluster performance for you. Data stream lifecycle is an optimized lifecycle tool that lets you focus on the most common lifecycle management needs, without unnecessary hardware-centric concepts like data tiers. + -* **Watcher** is not available, in favor of **<>**. +* **Watcher** is not available, in favor of **<>**. + Kibana Alerts allows rich integrations across use cases like APM, metrics, security, and uptime. Prepackaged rule types simplify setup and hide the details of complex, domain-specific detections, while providing a consistent interface across Kibana. + * Certain APIs, API parameters, index, cluster and node level settings are not available. Refer to our -<> for a list of available APIs. +<> for a list of available APIs. + Serverless Elasticsearch manages the underlying Elastic cluster for you, optimizing nodes, shards, and replicas for your use case. Because of this, various management and monitoring APIs, API parameters and settings are not available on Serverless. @@ -38,5 +38,5 @@ Because of this, various management and monitoring APIs, API parameters and sett .Other limitations [IMPORTANT] ==== -For serverless technical preview limitations, refer to <>. +For serverless technical preview limitations, refer to <>. 
==== diff --git a/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc b/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc index 1d4cb6e165..9b38795256 100644 --- a/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc +++ b/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc @@ -1,4 +1,4 @@ -[[technical-preview-limitations]] +[[elasticsearch-technical-preview-limitations]] = Technical preview limitations :description: Review the limitations that apply to Elasticsearch projects. @@ -13,7 +13,7 @@ The following are currently not available: * Cross-cluster search and cross-cluster replication * Snapshot and restore * Clone index API -* Migrations from non-serverless {es} deployments. In the interim, you can <> to move data to and from serverless projects. +* Migrations from non-serverless {es} deployments. In the interim, you can <> to move data to and from serverless projects. * Custom roles * Audit logging * Elasticsearch for Apache Hadoop diff --git a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc index a5087641bc..3281a97da7 100644 --- a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc +++ b/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc @@ -12,21 +12,21 @@ Elasticsearch allows you to build custom applications. Whether you have structur .Understanding Elasticsearch on serverless [IMPORTANT] ==== -Refer to <> and <> for important details, including features and limitations specific to {es} on serverless. +Refer to <> and <> for important details, including features and limitations specific to {es} on serverless. ==== [discrete] == Get started -* <>: Create your first Elasticsearch project. -* <>: Learn how to get your data into Elasticsearch. +* <>: Create your first Elasticsearch project. +* <>: Learn how to get your data into Elasticsearch. 
[discrete] == How to -* <>: Build your queries to perform and combine many types of searches. -* <>: Search, filter your data, and display your findings. -* <>: Create rules to detect complex conditions and trigger alerts. -* <>: Send requests with Console and profile queries with Search Profiler. -* <>: Manage user access, billing, and check performance metrics. +* <>: Build your queries to perform and combine many types of searches. +* <>: Search, filter your data, and display your findings. +* <>: Create rules to detect complex conditions and trigger alerts. +* <>: Send requests with Console and profile queries with Search Profiler. +* <>: Manage user access, billing, and check performance metrics. diff --git a/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc b/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc index 3917d19188..0c5f77fca7 100644 --- a/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc +++ b/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc @@ -1,4 +1,4 @@ -[[join-organization-from-existing-cloud-account]] +[[general-join-organization-from-existing-cloud-account]] = Join an organization from an existing Elastic Cloud account :description: Join a new organization and bring over your projects. diff --git a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc index e65d0ae8ee..d7eb19364b 100644 --- a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc +++ b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc @@ -1,4 +1,4 @@ -[[assign-user-roles]] +[[general-assign-user-roles]] = Assign user roles and privileges :description: Manage the predefined set of roles and privileges for all your projects. @@ -8,21 +8,21 @@ preview:[] Within an organization, users can have one or more roles and each role grants specific privileges. 
-You must assign user roles when you <>. +You must assign user roles when you <>. To subsequently edit the roles assigned to a user: . Go to the user icon on the header bar and select **Organization**. . Find the user on the **Members** tab of the **Organization** page. Click the member name to view and edit its roles. [discrete] -[[assign-user-roles-organization-level-roles]] +[[general-assign-user-roles-organization-level-roles]] == Organization-level roles * **Organization owner**. Can manage all roles under the organization and has full access to all serverless projects, organization-level details, billing details, and subscription levels. This role is assigned by default to the person who created the organization. * **Billing admin**. Has access to all invoices and payment methods. Can make subscription changes. [discrete] -[[assign-user-roles-instance-access-roles]] +[[general-assign-user-roles-instance-access-roles]] == Instance access roles Each serverless project type has a set of predefined roles that you can assign to your organization members. @@ -42,7 +42,7 @@ To assign a custom role to users, go to "Instance access roles" and select it fr endif::[] [discrete] -[[assign-user-roles-es]] +[[general-assign-user-roles-es]] === {es} * **Admin**. Has full access to project management, properties, and security privileges. Admins log into projects with superuser role privileges. @@ -50,7 +50,7 @@ endif::[] * **Viewer**. Has read-only access to project details, data, and features. [discrete] -[[assign-user-roles-observability]] +[[general-assign-user-roles-observability]] === {observability} * **Admin**. Has full access to project management, properties, and security privileges. Admins log into projects with superuser role privileges. @@ -58,7 +58,7 @@ endif::[] * **Viewer**. Has read-only access to project details, data, and features. [discrete] -[[assign-user-roles-security]] +[[general-assign-user-roles-security]] === {security} * **Admin**. 
Has full access to project management, properties, and security privileges. Admins log into projects with superuser role privileges. diff --git a/serverless/pages/general/manage-access-to-org.asciidoc b/serverless/pages/general/manage-access-to-org.asciidoc index 7c1b8354bb..2423b0f0e5 100644 --- a/serverless/pages/general/manage-access-to-org.asciidoc +++ b/serverless/pages/general/manage-access-to-org.asciidoc @@ -1,4 +1,4 @@ -[[manage-access-to-organization]] +[[general-manage-access-to-organization]] = Invite your team :description: Add members to your organization and projects. @@ -14,7 +14,7 @@ Alternatively, {cloud}/ec-saml-sso.html[configure {ecloud} SAML SSO] to enable y You can add multiple members by entering their email addresses separated by a space. + You can grant access to all projects of the same type with a unique role, or select individual roles for specific projects. -For more details about roles, refer to <>. +For more details about roles, refer to <>. . Click **Send invites**. + Invitations to join an organization are sent by email. Invited users have 72 hours to accept the invitation. If they do not join within that period, you will have to send a new invitation. @@ -24,7 +24,7 @@ On the **Members** tab of the **Organization** page, you can view the list of cu In the **Actions** column, click the three dots to edit a member’s role or revoke the invite. [discrete] -[[manage-access-to-organization-leave-an-organization]] +[[general-manage-access-to-organization-leave-an-organization]] == Leave an organization On the **Organization** page, click **Leave organization**. 
diff --git a/serverless/pages/general/manage-billing-check-subscription.asciidoc b/serverless/pages/general/manage-billing-check-subscription.asciidoc index 5c03d2c190..e03953bbb8 100644 --- a/serverless/pages/general/manage-billing-check-subscription.asciidoc +++ b/serverless/pages/general/manage-billing-check-subscription.asciidoc @@ -1,4 +1,4 @@ -[[check-subscription]] +[[general-check-subscription]] = Check your subscription :description: Manage your account details and subscription level. diff --git a/serverless/pages/general/manage-billing-history.asciidoc b/serverless/pages/general/manage-billing-history.asciidoc index db216feff6..d4eb192ad2 100644 --- a/serverless/pages/general/manage-billing-history.asciidoc +++ b/serverless/pages/general/manage-billing-history.asciidoc @@ -1,4 +1,4 @@ -[[billing-history]] +[[general-billing-history]] = Check your billing history :description: Monitor payments and billing receipts. diff --git a/serverless/pages/general/manage-billing-monitor-usage.asciidoc b/serverless/pages/general/manage-billing-monitor-usage.asciidoc index a804c10086..065bf585e9 100644 --- a/serverless/pages/general/manage-billing-monitor-usage.asciidoc +++ b/serverless/pages/general/manage-billing-monitor-usage.asciidoc @@ -1,4 +1,4 @@ -[[monitor-usage]] +[[general-monitor-usage]] = Monitor your account usage :description: Check the usage breakdown of your account. @@ -18,7 +18,7 @@ On the **Usage** page you can: [IMPORTANT] ==== -The usage breakdown information is an estimate. To get the exact amount you owe for a given month, check your invoices in the <>. +The usage breakdown information is an estimate. To get the exact amount you owe for a given month, check your invoices in the <>. 
==== .Elasticsearch minimum runtime VCUs diff --git a/serverless/pages/general/manage-billing-pricing-model.asciidoc b/serverless/pages/general/manage-billing-pricing-model.asciidoc index 7602c012cd..9ab07fbf51 100644 --- a/serverless/pages/general/manage-billing-pricing-model.asciidoc +++ b/serverless/pages/general/manage-billing-pricing-model.asciidoc @@ -1,4 +1,4 @@ -[[serverless-billing]] +[[general-serverless-billing]] = Serverless billing dimensions :description: Understand how usage affects serverless pricing. @@ -26,7 +26,7 @@ To learn about billing dimensions for specific offerings, refer to: == Add-ons [discrete] -[[serverless-billing-data-out]] +[[general-serverless-billing-data-out]] === Data out _Data out_ accounts for all of the traffic coming out of a serverless project. @@ -37,7 +37,7 @@ Data coming out of the project through AWS PrivateLink, GCP Private Service Conn or Azure Private Link is also considered data out. [discrete] -[[serverless-billing-support]] +[[general-serverless-billing-support]] === Support If your subscription level is Standard, there is no separate charge for Support reflected on your bill. diff --git a/serverless/pages/general/manage-billing-stop-project.asciidoc b/serverless/pages/general/manage-billing-stop-project.asciidoc index 4fd1b03847..1b3d83546b 100644 --- a/serverless/pages/general/manage-billing-stop-project.asciidoc +++ b/serverless/pages/general/manage-billing-stop-project.asciidoc @@ -1,4 +1,4 @@ -[[billing-stop-project]] +[[general-billing-stop-project]] = Stop charges for a project :description: How to stop charges for a project. 
diff --git a/serverless/pages/general/manage-billing.asciidoc b/serverless/pages/general/manage-billing.asciidoc index 1fb6f06325..44c4685784 100644 --- a/serverless/pages/general/manage-billing.asciidoc +++ b/serverless/pages/general/manage-billing.asciidoc @@ -1,4 +1,4 @@ -[[manage-billing]] +[[general-manage-billing]] = Manage billing of your organization :description: Configure the billing details of your organization. @@ -23,10 +23,10 @@ You can manage the billing details of your organization directly from the Elasti From the **Billing pages**, you can perform the following tasks: -* <> -* <> -* <> +* <> +* <> +* <> -If you have a project that you're no longer using, refer to <>. +If you have a project that you're no longer using, refer to <>. -To learn about the serverless pricing model, refer to <> and our https://www.elastic.co/pricing/serverless-search[pricing page]. +To learn about the serverless pricing model, refer to <> and our https://www.elastic.co/pricing/serverless-search[pricing page]. diff --git a/serverless/pages/general/manage-org.asciidoc b/serverless/pages/general/manage-org.asciidoc index 9ef5be02b2..31da7a3451 100644 --- a/serverless/pages/general/manage-org.asciidoc +++ b/serverless/pages/general/manage-org.asciidoc @@ -1,4 +1,4 @@ -[[manage-organization]] +[[general-manage-organization]] = Manage your organization :description: Manage your instances, users, and settings. @@ -10,6 +10,6 @@ When you sign up to Elastic Cloud, you create an **organization**. This organization is the umbrella for all of your Elastic Cloud resources, users, and account settings. Every organization has a unique identifier. Bills are invoiced according to the billing contact and details that you set for your organization. 
-* <> -* <> -* <> +* <> +* <> +* <> diff --git a/serverless/pages/general/manage-your-project-rest-api.asciidoc b/serverless/pages/general/manage-your-project-rest-api.asciidoc index 8e6872ae58..b3c1336520 100644 --- a/serverless/pages/general/manage-your-project-rest-api.asciidoc +++ b/serverless/pages/general/manage-your-project-rest-api.asciidoc @@ -1,4 +1,4 @@ -[[manage-project-with-api]] +[[general-manage-project-with-api]] = Using the Project Management REST API :description: Manage your organization's serverless projects using the REST API. @@ -14,7 +14,7 @@ More APIs let you interact with data, capabilities, and settings inside of speci ==== [discrete] -[[manage-project-with-api-api-principles]] +[[general-manage-project-with-api-api-principles]] == API Principles * The Elastic Cloud REST API is built following REST principles: @@ -25,7 +25,7 @@ More APIs let you interact with data, capabilities, and settings inside of speci * JSON is the data interchange format. [discrete] -[[manage-project-with-api-authentication]] +[[general-manage-project-with-api-authentication]] == Authentication API keys are used to authenticate requests to the Elastic Cloud REST API. @@ -46,7 +46,7 @@ curl -H "Authorization: ApiKey essu_..." https://api.elastic-cloud.com/api/v1/se ---- [discrete] -[[manage-project-with-api-open-api-specification]] +[[general-manage-project-with-api-open-api-specification]] == Open API Specification The Project Management API is documented using the https://en.wikipedia.org/wiki/OpenAPI_Specification[OpenAPI Specification]. The current supported version of the specification is `3.0`. @@ -56,7 +56,7 @@ For details, check the https://www.elastic.co/docs/api/doc/elastic-cloud-serverl This specification can be used to generate client SDKs, or on tools that support it, such as the https://editor.swagger.io[Swagger Editor]. 
[discrete] -[[manage-project-with-api-examples]] +[[general-manage-project-with-api-examples]] == Examples To try the examples in this section: @@ -70,7 +70,7 @@ export API_KEY="YOUR_GENERATED_API_KEY" ---- [discrete] -[[manage-project-with-api-create-a-serverless-elasticsearch-project]] +[[general-manage-project-with-api-create-a-serverless-elasticsearch-project]] === Create a serverless Elasticsearch project [source,bash] @@ -86,7 +86,7 @@ curl -H "Authorization: ApiKey $API_KEY" \ <1> Replace **`My project`** with a more descriptive name in this call. -<2> You can <>. +<2> You can <>. The response from the create project request will include the created project details, such as the project ID, the credentials to access the project, and the endpoints to access different apps such as Elasticsearch and Kibana. @@ -117,7 +117,7 @@ export PROJECT_ID=cace8e65457043698ed3d99da2f053f6 ---- [discrete] -[[manage-project-with-api-get-project]] +[[general-manage-project-with-api-get-project]] === Get project You can retrieve your project details through an API call: @@ -129,7 +129,7 @@ curl -H "Authorization: ApiKey $API_KEY" \ ---- [discrete] -[[manage-project-with-api-get-project-status]] +[[general-manage-project-with-api-get-project-status]] === Get project status The 'status' endpoint indicates whether the project is initialized and ready to be used. 
In the response, the project's `phase` will change from "initializing" to "initialized" when it is ready: @@ -150,7 +150,7 @@ Example response: ---- [discrete] -[[manage-project-with-api-reset-credentials]] +[[general-manage-project-with-api-reset-credentials]] === Reset Credentials If you lose the credentials provided at the time of the project creation, you can reset the credentials by using the following endpoint: @@ -163,7 +163,7 @@ curl -H "Authorization: ApiKey $API_KEY" \ ---- [discrete] -[[manage-project-with-api-delete-project]] +[[general-manage-project-with-api-delete-project]] === Delete Project You can delete your project via the API: @@ -175,7 +175,7 @@ curl -XDELETE -H "Authorization: ApiKey $API_KEY" \ ---- [discrete] -[[manage-project-with-api-update-project]] +[[general-manage-project-with-api-update-project]] === Update Project You can update your project using a PATCH request. Only the fields included in the body of the request will be updated. @@ -192,7 +192,7 @@ curl -H "Authorization: ApiKey $API_KEY" \ ---- [discrete] -[[manage-project-with-api-list-available-regions]] +[[general-manage-project-with-api-list-available-regions]] === List available regions You can obtain the list of regions where projects can be created using the API: diff --git a/serverless/pages/general/manage-your-project.asciidoc b/serverless/pages/general/manage-your-project.asciidoc index 941d5779a3..77da7f1ce1 100644 --- a/serverless/pages/general/manage-your-project.asciidoc +++ b/serverless/pages/general/manage-your-project.asciidoc @@ -1,4 +1,4 @@ -[[manage-project]] +[[elasticsearch-manage-project]] = Manage your projects :description: Configure project-wide features and usage. @@ -20,7 +20,7 @@ From the project page, you can: * **Manage members**. Add members and manage their access to this project or other resources of your organization. 
[discrete] -[[manage-project-search-ai-lake-settings]] +[[elasticsearch-manage-project-search-ai-lake-settings]] == Search AI Lake settings Once ingested, your data is stored in cost-efficient, general storage. A cache layer is available on top of the general storage for recent and frequently queried data that provides faster search speed. Data in this cache layer is considered **search-ready**. @@ -45,19 +45,19 @@ The **Cost-efficient** Search Power setting limits the available cache size, and The **Balanced** Search Power setting ensures that there is sufficient cache for all search-ready data, in order to respond quickly to queries. The **Performance** Search Power setting provides more computing resources in addition to the searchable data cache, in order to respond quickly to higher query volumes and more complex queries. -| +| {es-badge} | **Search Boost Window** a| Non-time series data is always considered search-ready. The **Search Boost Window** determines the volume of time series project data that will be considered search-ready. Increasing the window results in a bigger portion of time series project data included in the total search-ready data volume. -| +| {es-badge} | **Data Retention** a| Data retention policies determine how long your project data is retained. You can specify different retention periods for specific data streams in your project. -| +| {es-badge}{obs-badge}{sec-badge} | a| **Maximum data retention period** @@ -65,24 +65,24 @@ a| **Maximum data retention period** When enabled, this setting determines the maximum length of time that data can be retained in any data streams of this project. Editing this setting replaces the data retention set for all data streams of the project that have a longer data retention defined. Data older than the new maximum retention period that you set is permanently deleted. 
-| +| {sec-badge} | a| **Default data retention period** When enabled, this setting determines the default retention period that is automatically applied to all data streams in your project that do not have a custom retention period already set. -| +| {sec-badge} | **Project features** | Controls <> for your {elastic-sec} project. -| +| {sec-badge} |=== [discrete] [[project-features-add-ons]] == Project features and add-ons - For {elastic-sec} projects, edit the **Project features** to select a feature tier and enable add-on options for specific use cases. +{sec-badge} For {elastic-sec} projects, edit the **Project features** to select a feature tier and enable add-on options for specific use cases. |=== | Feature tier | Description and add-ons @@ -101,7 +101,7 @@ a| Everything in **Security Analytics Essentials** plus advanced features such a |=== [discrete] -[[manage-project-downgrading-the-feature-tier]] +[[elasticsearch-manage-project-downgrading-the-feature-tier]] === Downgrading the feature tier When you downgrade your Security project features selection from **Security Analytics Complete** to **Security Analytics Essentials**, the following features become unavailable: diff --git a/serverless/pages/general/service-status.asciidoc b/serverless/pages/general/service-status.asciidoc index 1c35af754e..f132eaefb5 100644 --- a/serverless/pages/general/service-status.asciidoc +++ b/serverless/pages/general/service-status.asciidoc @@ -1,4 +1,4 @@ -[[serverless-status]] +[[general-serverless-status]] = Monitor serverless status :keywords: serverless @@ -9,7 +9,7 @@ When availability changes, Elastic makes sure to provide you with a current serv To check current and past service availability, go to the Elastic serverless https://serverless-preview-status.statuspage.io/[service status] page. 
[discrete] -[[serverless-status-subscribe-to-updates]] +[[general-serverless-status-subscribe-to-updates]] == Subscribe to updates You can be notified about changes to the service status automatically. diff --git a/serverless/pages/general/sign-up.asciidoc b/serverless/pages/general/sign-up.asciidoc index 5faf81031d..f6eec4b81e 100644 --- a/serverless/pages/general/sign-up.asciidoc +++ b/serverless/pages/general/sign-up.asciidoc @@ -1,4 +1,4 @@ -[[sign-up-trial]] +[[general-sign-up-trial]] = Get started with serverless :description: Information about signing up for a serverless Elastic Cloud trial @@ -10,7 +10,7 @@ There are two options to create serverless projects: * If you are a new user, you can https://cloud.elastic.co/serverless-registration[sign up for a free 14-day trial], and you will be able to launch a serverless project. [discrete] -[[sign-up-trial-what-is-included-in-my-trial]] +[[general-sign-up-trial-what-is-included-in-my-trial]] == What is included in my trial? Your free 14-day trial includes: @@ -36,7 +36,7 @@ During the trial period, you are limited to one active hosted deployment and one ==== [discrete] -[[sign-up-trial-what-limits-are-in-place-during-a-trial]] +[[general-sign-up-trial-what-limits-are-in-place-during-a-trial]] == What limits are in place during a trial? During the free 14 day trial, Elastic provides access to one hosted deployment and one serverless project. If all you want to do is try out Elastic, the trial includes more than enough to get you started. During the trial period, some limitations apply. @@ -66,13 +66,13 @@ To remove limitations, subscribe to https://www.elastic.co/guide/en/cloud/curren You can subscribe to Elastic Cloud at any time during your trial. /serverless/general/serverless-billing[Billing] starts when you subscribe. To maximize the benefits of your trial, subscribe at the end of the free period. 
To monitor charges, anticipate future costs, and adjust your usage, check your https://www.elastic.co/guide/en/cloud/current/ec-account-usage.html[account usage] and https://www.elastic.co/guide/en/cloud/current/ec-billing-history.html[billing history]. [discrete] -[[sign-up-trial-how-do-i-get-started-with-my-trial]] +[[general-sign-up-trial-how-do-i-get-started-with-my-trial]] == How do I get started with my trial? Start by checking out some common approaches for https://www.elastic.co/guide/en/cloud/current/ec-cloud-ingest-data.html#ec-ingest-methods[moving data into Elastic Cloud]. [discrete] -[[sign-up-trial-what-happens-at-the-end-of-the-trial]] +[[general-sign-up-trial-what-happens-at-the-end-of-the-trial]] == What happens at the end of the trial? When your trial expires, the deployment and project that you created during the trial period are suspended until you subscribe to https://www.elastic.co/guide/en/cloud/current/ec-billing-details.html[Elastic Cloud]. When you subscribe, you are able to resume your deployment and serverless project, and regain access to the ingested data. After your trial expires, you have 30 days to subscribe. After 30 days, your deployment, serverless project, and ingested data are permanently deleted. @@ -80,7 +80,7 @@ When your trial expires, the deployment and project that you created during the If you’re interested in learning more ways to subscribe to Elastic Cloud, don’t hesitate to https://www.elastic.co/contact[contact us]. [discrete] -[[sign-up-trial-how-do-i-sign-up-through-a-marketplace]] +[[general-sign-up-trial-how-do-i-sign-up-through-a-marketplace]] == How do I sign up through a marketplace? If you’re interested in consolidated billing, subscribe from the AWS Marketplace, which allows you to skip the trial period and connect your AWS Marketplace email to your unique Elastic account. @@ -91,7 +91,7 @@ Serverless projects are only available for AWS Marketplace. 
Support for GCP Mark ==== [discrete] -[[sign-up-trial-how-do-i-get-help]] +[[general-sign-up-trial-how-do-i-get-help]] == How do I get help? We’re here to help. If you have any questions, reach out to https://cloud.elastic.co/support[Support]. diff --git a/serverless/pages/general/user-profile.asciidoc b/serverless/pages/general/user-profile.asciidoc index ca593c45a3..041b29e53e 100644 --- a/serverless/pages/general/user-profile.asciidoc +++ b/serverless/pages/general/user-profile.asciidoc @@ -1,4 +1,4 @@ -[[user-profile]] +[[general-user-profile]] = Update your user profile :description: Manage your profile settings. @@ -9,7 +9,7 @@ preview:[] To edit your user profile, go to the user icon on the header bar and select **Profile**. [discrete] -[[user-profile-update-your-email-address]] +[[general-user-profile-update-your-email-address]] == Update your email address Your email address is used to sign in. If needed, you can change this email address. @@ -20,7 +20,7 @@ Your email address is used to sign in. If needed, you can change this email addr An email is sent to the new address with a link to confirm the change. If you don't get the email after a few minutes, check your spam folder. [discrete] -[[user-profile-change-your-password]] +[[general-user-profile-change-your-password]] == Change your password When you signed up with your email address, you selected a password that you use to log in to the Elastic Cloud console. If needed, you can change this password. @@ -38,7 +38,7 @@ If you don't know your current password: An email is sent to the address you specified with a link to reset the password. If you don't get the email after a few minutes, check your spam folder. [discrete] -[[user-profile-enable-multi-factor-authentication]] +[[general-user-profile-enable-multi-factor-authentication]] == Enable multi-factor authentication To add an extra layer of security, you can either set up Google authenticator or text messaging on a mobile device. 
diff --git a/serverless/pages/general/what-is-serverless.asciidoc b/serverless/pages/general/what-is-serverless.asciidoc index a238bf3031..7e2331c68d 100644 --- a/serverless/pages/general/what-is-serverless.asciidoc +++ b/serverless/pages/general/what-is-serverless.asciidoc @@ -1,4 +1,4 @@ -[[what-is-serverless-elastic]] +[[general-what-is-serverless-elastic]] = What is serverless Elastic? :keywords: serverless @@ -28,7 +28,7 @@ Until May 31, 2024, your serverless consumption will not incur any charges, but ==== [discrete] -[[what-is-serverless-elastic-control-your-data-and-performance]] +[[general-what-is-serverless-elastic-control-your-data-and-performance]] == Control your data and performance Control your project data and query performance against your project data. @@ -45,7 +45,7 @@ and you define the retention settings for your data streams. ==== [discrete] -[[what-is-serverless-elastic-differences-between-serverless-projects-and-hosted-deployments-on-ecloud]] +[[general-what-is-serverless-elastic-differences-between-serverless-projects-and-hosted-deployments-on-ecloud]] == Differences between serverless projects and hosted deployments on {ecloud} You can run https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[hosted deployments] of the {stack} on {ecloud}. These hosted deployments provide more provisioning and advanced configuration options. 
@@ -95,7 +95,7 @@ You can run https://www.elastic.co/guide/en/cloud/current/ec-getting-started.htm |=== [discrete] -[[what-is-serverless-elastic-answers-to-common-serverless-questions]] +[[general-what-is-serverless-elastic-answers-to-common-serverless-questions]] == Answers to common serverless questions **What Support is available for the serverless preview?** diff --git a/serverless/pages/project-settings/custom-roles.asciidoc b/serverless/pages/project-settings/custom-roles.asciidoc index 6bc9fb507e..c709612a09 100644 --- a/serverless/pages/project-settings/custom-roles.asciidoc +++ b/serverless/pages/project-settings/custom-roles.asciidoc @@ -1,17 +1,19 @@ -[[-serverless-custom-roles]] +[[custom-roles]] = Custom roles :description: Create and manage roles that grant privileges within your project. :keywords: serverless, Elasticsearch, Security -ifeval::["{serverlessCustomRoles}" == "false"] -coming:[Coming soonundefinedundefined] -endif::[] +// ifndef::serverlessCustomRoles[] +// coming:[] +// endif::[] -ifeval::["{serverlessCustomRoles}" == "true"] -preview:[]This content applies to: +// ifdef::serverlessCustomRoles[] +preview:[] -The built-in <> and <> are great for getting started with {serverless-full}, and for system administrators who do not need more restrictive access. +This content applies to: {es-badge} {sec-badge} + +The built-in <> and <> are great for getting started with {serverless-full}, and for system administrators who do not need more restrictive access. As an administrator, however, you have the ability to create your own roles to describe exactly the kind of access your users should have within a specific project. For example, you might create a marketing_user role, which you then assign to all users in your marketing department. 
@@ -41,7 +43,7 @@ You cannot assign {ref}/security-privileges.html#_run_as_privilege[run as privil ==== [discrete] -[[-serverless-custom-roles-es-cluster-privileges]] +[[custom-roles-es-cluster-privileges]] == {es} cluster privileges Cluster privileges grant access to monitoring and management features in {es}. @@ -55,7 +57,7 @@ image::images/custom-roles-cluster-privileges.png[Create a custom role and defin Refer to {ref}/security-privileges.html#privileges-list-cluster[cluster privileges] for a complete description of available options. [discrete] -[[-serverless-custom-roles-es-index-privileges]] +[[custom-roles-es-index-privileges]] == {es} index privileges Each role can grant access to multiple data indices, and each index can have a different set of privileges. @@ -76,7 +78,7 @@ With field-level security (FLS), you can instruct {es} to grant or deny access t // Derived from https://www.elastic.co/guide/en/kibana/current/kibana-role-management.html#adding_cluster_privileges [discrete] -[[-serverless-custom-roles-kib-privileges]] +[[custom-roles-kib-privileges]] == {kib} privileges When you create a custom role, click **Add Kibana privilege** to grant access to specific features. @@ -110,5 +112,5 @@ As new features are added to {serverless-full}, roles that use the custom option After your roles are set up, the next step to securing access is to assign roles to your users. Click the **Assign roles** link to go to the **Members** tab of the **Organization** page. -Learn more in <>. -endif::[] +Learn more in <>. +// endif::[] diff --git a/serverless/pages/project-settings/files.asciidoc b/serverless/pages/project-settings/files.asciidoc index b29d590e00..a0b9d90734 100644 --- a/serverless/pages/project-settings/files.asciidoc +++ b/serverless/pages/project-settings/files.asciidoc @@ -8,7 +8,7 @@ preview:[] This content applies to: {es-badge} {obs-badge} {sec-badge} -Several {serverless-full} features let you upload files. 
For example, you can add files to <> or upload a logo to an **Image** panel in a <>. +Several {serverless-full} features let you upload files. For example, you can add files to <> or upload a logo to an **Image** panel in a <>. You can access these uploaded files in **{project-settings} → {manage-app} → {files-app}**. diff --git a/serverless/pages/project-settings/maintenance-windows.mdx b/serverless/pages/project-settings/maintenance-windows.mdx index 06e7cf8aae..e215d36219 100644 --- a/serverless/pages/project-settings/maintenance-windows.mdx +++ b/serverless/pages/project-settings/maintenance-windows.mdx @@ -49,7 +49,7 @@ For example, you can suppress notifications for alerts from specific rules: - You can select only a single category when you turn on filters. - Some rules are not affected by maintenance window filters because their alerts do not contain requisite data. -In particular, [((stack-monitor-app))](((kibana-ref))/kibana-alerts.html), [tracking containment](((kibana-ref))geo-alerting.html), [((anomaly-jobs)) health](((ml-docs))/ml-configuring-alerts.html), and [transform health](((ref))/transform-alerts.html) rules are not affected by the filters. +In particular, [((stack-monitor-app))](((kibana-ref))/kibana-alerts.html), [tracking containment](((kibana-ref))/geo-alerting.html), [((anomaly-jobs)) health](((ml-docs))/ml-configuring-alerts.html), and [transform health](((ref))/transform-alerts.html) rules are not affected by the filters. A maintenance window can have any one of the following statuses: diff --git a/serverless/pages/project-settings/maps.asciidoc b/serverless/pages/project-settings/maps.asciidoc index 1dca2d1e90..7785d68614 100644 --- a/serverless/pages/project-settings/maps.asciidoc +++ b/serverless/pages/project-settings/maps.asciidoc @@ -65,7 +65,7 @@ Check out {kibana-ref}/import-geospatial-data.html[Import geospatial data]. Viewing data from different angles provides better insights. 
Dimensions that are obscured in one visualization might be illuminated in another. -Add your map to a <> and view your geospatial data alongside bar charts, pie charts, tag clouds, and more. +Add your map to a <> and view your geospatial data alongside bar charts, pie charts, tag clouds, and more. This choropleth map shows the density of non-emergency service requests in San Diego by council district. The map is embedded in a dashboard, so users can better understand when services are requested and gain insight into the top requested services. diff --git a/serverless/pages/project-settings/project-settings.asciidoc b/serverless/pages/project-settings/project-settings.asciidoc index d13ab982ca..dea1b6f52b 100644 --- a/serverless/pages/project-settings/project-settings.asciidoc +++ b/serverless/pages/project-settings/project-settings.asciidoc @@ -14,7 +14,7 @@ Go to **Project Settings** to manage your indices, data views, saved objects, se Access to individual features is governed by Elastic user roles. Consult your administrator if you do not have the appropriate access. -To learn more about roles, refer to <>. +To learn more about roles, refer to <>. |=== | Feature | Description | Available in @@ -23,7 +23,7 @@ To learn more about roles, refer to <>. | Create and manage keys that can send requests on behalf of users. | {es-badge}{obs-badge}{sec-badge} -| <> +| <> | Bulk assign asset criticality to multiple entities by importing a text file. | {sec-badge} @@ -35,7 +35,7 @@ To learn more about roles, refer to <>. | Manage the fields in the data views that retrieve your data from {es}. | {es-badge}{obs-badge}{sec-badge} -| <> +| <> | Manage entity risk scoring, and preview risky entities. 
| {sec-badge} diff --git a/serverless/pages/welcome-to-serverless.asciidoc b/serverless/pages/welcome-to-serverless.asciidoc index 5188c00e75..a407e0c041 100644 --- a/serverless/pages/welcome-to-serverless.asciidoc +++ b/serverless/pages/welcome-to-serverless.asciidoc @@ -66,19 +66,19 @@ Detect, investigate, and respond to threats with Elastic Security. |=== | -a| <> +a| <> Invite new members to your organization. -a| <> +a| <> Assign user roles and privileges to members in your organization. -a| <> +a| <> Manage your project data, search power, and more. -a| <> +a| <> View the details about your subscription. -a| <> +a| <> Check past and current usage for your projects. a| <> From eb30b7692d58b0ca252b712136392569f110555d Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Wed, 30 Oct 2024 08:27:40 -0500 Subject: [PATCH 07/25] fix attributes in parentheses --- .../explore-your-data-alerting.asciidoc | 2 +- ...ata-through-integrations-logstash.asciidoc | 2 +- .../serverless-differences.asciidoc | 2 +- .../manage-access-to-org-user-roles.asciidoc | 2 +- .../action-connectors.asciidoc | 42 +++++++++---------- .../project-settings/data-views.asciidoc | 2 +- .../maintenance-windows.asciidoc | 2 +- 7 files changed, 27 insertions(+), 27 deletions(-) diff --git a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc index 60db48de02..c96e30cf4f 100644 --- a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc @@ -54,7 +54,7 @@ After a rule is created, you can open the action menu (…) and select **Edit ru You can also manage rules as resources with the https://registry.terraform.io/providers/elastic/elasticstack/latest[Elasticstack provider] for Terraform. 
For more details, refer to the https://registry.terraform.io/providers/elastic/elasticstack/latest/docs/resources/kibana_alerting_rule[elasticstack_kibana_alerting_rule] resource. -// For details on what types of rules are available and how to configure them, refer to [Rule types]{(kibana-ref}/rule-types.html). +// For details on what types of rules are available and how to configure them, refer to [Rule types]({kibana-ref}/rule-types.html). // missing link diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc index 16b57917ab..5467de5e91 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc @@ -26,7 +26,7 @@ Some differences to note between {es3} and self-managed {es}: * Your logstash-output-elasticsearch configuration uses **API keys** to access {es} from {ls}. User-based security settings are ignored and may cause errors. -* {es3} uses **{dlm} {(dlm-init})** instead of {ilm} {(ilm-init}). +* {es3} uses **{dlm} ({dlm-init})** instead of {ilm} ({ilm-init}). If you add {ilm-init} settings to your {es} output configuration, they are ignored and may cause errors. * **{ls} monitoring** for {serverless-short} is available through the https://github.com/elastic/integrations/blob/main/packages/logstash/_dev/build/docs/README.md[{ls} Integration] in https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[Elastic Observability]. 
diff --git a/serverless/pages/elasticsearch/serverless-differences.asciidoc b/serverless/pages/elasticsearch/serverless-differences.asciidoc index 010425ceab..3dcbf9dc71 100644 --- a/serverless/pages/elasticsearch/serverless-differences.asciidoc +++ b/serverless/pages/elasticsearch/serverless-differences.asciidoc @@ -13,7 +13,7 @@ preview:[] Some features that are available in Elastic Cloud Hosted and self-managed offerings are not available in serverless {es}. These features have either been replaced by a new feature, or are not applicable in the new Serverless architecture: -* **Index lifecycle management {(ilm-init})** is not available, in favor of **https://www.elastic.co/docs/current/serverless/index-management[data stream lifecycle]**. +* **Index lifecycle management ({ilm-init})** is not available, in favor of **https://www.elastic.co/docs/current/serverless/index-management[data stream lifecycle]**. + In an Elastic Cloud Hosted or self-managed environment, {ilm-init} lets you automatically transition indices through data tiers according to your performance needs and retention requirements. This allows you to balance hardware costs with performance. Serverless Elasticsearch eliminates this diff --git a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc index d7eb19364b..7fff99b08d 100644 --- a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc +++ b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc @@ -28,7 +28,7 @@ To subsequently edit the roles assigned to a user: Each serverless project type has a set of predefined roles that you can assign to your organization members. You can assign the predefined roles: -* globally, for all projects of the same type {(es-serverless}, {observability}, or {security}). In this case, the role will also apply to new projects created later. 
+* globally, for all projects of the same type ({es-serverless}, {observability}, or {security}). In this case, the role will also apply to new projects created later. * individually, for specific projects only. To do that, you have to set the **Role for all** field of that specific project type to **None**. For example, you can assign a user the developer role for a specific {es-serverless} project: diff --git a/serverless/pages/project-settings/action-connectors.asciidoc b/serverless/pages/project-settings/action-connectors.asciidoc index 9bd1eb08fa..f3ff58cb5f 100644 --- a/serverless/pages/project-settings/action-connectors.asciidoc +++ b/serverless/pages/project-settings/action-connectors.asciidoc @@ -187,26 +187,26 @@ Actions are instantiations of a connector that are linked to rules and run as ba //// /* {kib} provides the following types of connectors for use with {alert-features} : -- [D3 Security]{(kibana-ref}/d3security-action-type.html) -- [Email]{(kibana-ref}/email-action-type.html) -- [Generative AI]{(kibana-ref}/gen-ai-action-type.html) -- [IBM Resilient]{(kibana-ref}/resilient-action-type.html) -- [Index]{(kibana-ref}/index-action-type.html) -- [Jira]{(kibana-ref}/jira-action-type.html) -- [Microsoft Teams]{(kibana-ref}/teams-action-type.html) -- [Opsgenie]{(kibana-ref}/opsgenie-action-type.html) -- [PagerDuty]{(kibana-ref}/pagerduty-action-type.html) -- [ServerLog]{(kibana-ref}/server-log-action-type.html) -- [ServiceNow ITSM]{(kibana-ref}/servicenow-action-type.html) -- [ServiceNow SecOps]{(kibana-ref}/servicenow-sir-action-type.html) -- [ServiceNow ITOM]{(kibana-ref}/servicenow-itom-action-type.html) -- [Slack]{(kibana-ref}/slack-action-type.html) -- [Swimlane]{(kibana-ref}/swimlane-action-type.html) -- [Tines]{(kibana-ref}/tines-action-type.html) -- [Torq]{(kibana-ref}/torq-action-type.html) -- [Webhook]{(kibana-ref}/webhook-action-type.html) -- [Webhook - Case Management]{(kibana-ref}/cases-webhook-action-type.html) -- 
[xMatters]{(kibana-ref}/xmatters-action-type.html) */ +- [D3 Security]({kibana-ref}/d3security-action-type.html) +- [Email]({kibana-ref}/email-action-type.html) +- [Generative AI]({kibana-ref}/gen-ai-action-type.html) +- [IBM Resilient]({kibana-ref}/resilient-action-type.html) +- [Index]({kibana-ref}/index-action-type.html) +- [Jira]({kibana-ref}/jira-action-type.html) +- [Microsoft Teams]({kibana-ref}/teams-action-type.html) +- [Opsgenie]({kibana-ref}/opsgenie-action-type.html) +- [PagerDuty]({kibana-ref}/pagerduty-action-type.html) +- [ServerLog]({kibana-ref}/server-log-action-type.html) +- [ServiceNow ITSM]({kibana-ref}/servicenow-action-type.html) +- [ServiceNow SecOps]({kibana-ref}/servicenow-sir-action-type.html) +- [ServiceNow ITOM]({kibana-ref}/servicenow-itom-action-type.html) +- [Slack]({kibana-ref}/slack-action-type.html) +- [Swimlane]({kibana-ref}/swimlane-action-type.html) +- [Tines]({kibana-ref}/tines-action-type.html) +- [Torq]({kibana-ref}/torq-action-type.html) +- [Webhook]({kibana-ref}/webhook-action-type.html) +- [Webhook - Case Management]({kibana-ref}/cases-webhook-action-type.html) +- [xMatters]({kibana-ref}/xmatters-action-type.html) */ //// // [cols="2"] @@ -241,7 +241,7 @@ Actions are instantiations of a connector that are linked to rules and run as ba Some connector types are paid commercial features, while others are free. For a comparison of the Elastic subscription levels, go to -[the subscription page]{(subscriptions}). +[the subscription page]({subscriptions}).
*/ //// diff --git a/serverless/pages/project-settings/data-views.asciidoc b/serverless/pages/project-settings/data-views.asciidoc index ae51d6c304..870e60ac9e 100644 --- a/serverless/pages/project-settings/data-views.asciidoc +++ b/serverless/pages/project-settings/data-views.asciidoc @@ -111,7 +111,7 @@ For an example, refer to Date: Wed, 30 Oct 2024 10:30:08 -0500 Subject: [PATCH 10/25] fix connections page, add missing security pages --- serverless/index.asciidoc | 4 +- .../action-connectors.asciidoc | 195 +++--------------- 2 files changed, 29 insertions(+), 170 deletions(-) diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index aa7c0c13d8..2e2164ac66 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -348,7 +348,7 @@ include::{security-serverless}/endpoint-response-actions/third-party-actions.asc include::{security-serverless}/endpoint-response-actions/response-actions-config.asciidoc[leveloffset=+3] include::{security-serverless}/cloud-native-security/cloud-native-security-overview.asciidoc[leveloffset=+2] -// include::{security-serverless}/cloud-native-security/security-posture-management.asciidoc[leveloffset=+3] +include::{security-serverless}/cloud-native-security/security-posture-management.asciidoc[leveloffset=+3] include::{security-serverless}/cloud-native-security/enable-cloudsec.asciidoc[leveloffset=+3] include::{security-serverless}/cloud-native-security/cspm.asciidoc[leveloffset=+3] include::{security-serverless}/cloud-native-security/cspm-get-started.asciidoc[leveloffset=+4] @@ -357,7 +357,7 @@ include::{security-serverless}/cloud-native-security/cspm-get-started-azure.asci // include::{security-serverless}/cloud-native-security/cspm-findings-page.asciidoc[leveloffset=+4] // include::{security-serverless}/cloud-native-security/benchmark-rules.asciidoc[leveloffset=+4] // include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+4] -// 
include::{security-serverless}/cloud-native-security/cspm-security-posture-faq.asciidoc[leveloffset=+4] +include::{security-serverless}/cloud-native-security/cspm-security-posture-faq.asciidoc[leveloffset=+4] include::{security-serverless}/cloud-native-security/kspm.asciidoc[leveloffset=+3] include::{security-serverless}/cloud-native-security/get-started-with-kspm.asciidoc[leveloffset=+4] include::{security-serverless}/cloud-native-security/cspm-findings-page.asciidoc[leveloffset=+4] diff --git a/serverless/pages/project-settings/action-connectors.asciidoc b/serverless/pages/project-settings/action-connectors.asciidoc index f3ff58cb5f..b20153bb23 100644 --- a/serverless/pages/project-settings/action-connectors.asciidoc +++ b/serverless/pages/project-settings/action-connectors.asciidoc @@ -10,174 +10,33 @@ This content applies to: {es-badge} {obs-badge} {sec-badge} The list of available connectors varies by project type. - +* {kibana-ref}/bedrock-action-type.html[Amazon Bedrock]: Send a request to Amazon Bedrock. +* {kibana-ref}/cases-action-type.html[Cases]: Add alerts to cases. +* {kibana-ref}/crowdstrike-action-type.html[CrowdStrike]: Send a request to CrowdStrike. +* {kibana-ref}/d3security-action-type.html[D3 Security]: Create an event or trigger playbook workflow actions in D3 SOAR. +* {kibana-ref}/email-action-type.html[Email]: Send email from your server. +* https://www.elastic.co/guide/en/kibana/master/gemini-action-type.html[Google Gemini]: Send a request to Google Gemini. +* {kibana-ref}/resilient-action-type.html[IBM Resilient]: Create an incident in IBM Resilient. +* {kibana-ref}/index-action-type.html[Index]: Index data into Elasticsearch. +* {kibana-ref}/jira-action-type.html[Jira]: Create an incident in Jira. +* {kibana-ref}/teams-action-type.html[Microsoft Teams]: Send a message to a Microsoft Teams channel. +* {kibana-ref}/obs-ai-assistant-action-type.html[Observability AI Assistant]: Add AI-driven insights and custom actions to your workflow. 
+* {kibana-ref}/openai-action-type.html[OpenAI]: Send a request to OpenAI. +* {kibana-ref}/opsgenie-action-type.html[Opsgenie]: Create or close an alert in Opsgenie. +* {kibana-ref}/pagerduty-action-type.html[PagerDuty]: Send an event in PagerDuty. +* {kibana-ref}/sentinelone-action-type.html[SentinelOne]: Perform response actions on SentinelOne-protected hosts. +* {kibana-ref}/server-log-action-type.html[ServerLog]: Add a message to a Kibana log. +* {kibana-ref}/servicenow-itom-action-type.html[ServiceNow ITOM]: Create an event in ServiceNow ITOM. +* {kibana-ref}/servicenow-action-type.html[ServiceNow ITSM]: Create an incident in ServiceNow ITSM. +* {kibana-ref}/servicenow-sir-action-type.html[ServiceNow SecOps]: Create a security incident in ServiceNow SecOps. +* {kibana-ref}/slack-action-type.html[Slack]: Send messages to Slack channels. +* {kibana-ref}/swimlane-action-type.html[Swimlane]: Create records in Swimlane. +* {kibana-ref}/thehive-action-type.html[TheHive]: Create cases and alerts in TheHive. +* {kibana-ref}/tines-action-type.html[Tines]: Send events to a story. +* {kibana-ref}/torq-action-type.html[Torq]: Trigger a Torq workflow. +* {kibana-ref}/webhook-action-type.html[Webhook]: Send a request to a web service. +* {kibana-ref}/cases-webhook-action-type.html[Webhook - Case Management]: Send a request to a Case Management web service. +* {kibana-ref}/xmatters-action-type.html[xMatters]: Trigger an xMatters workflow. //// /* Connectors provide a central place to store connection information for services and integrations with third party systems. 
From bfa6532a9b5eeac6038be3bab29ea9d616904b90 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Wed, 30 Oct 2024 12:44:16 -0500 Subject: [PATCH 11/25] qa general --- serverless/index.asciidoc | 9 ++++++ ...h-your-data-semantic-search-elser.asciidoc | 2 +- serverless/pages/general/index.asciidoc | 31 ------------------- .../manage-access-to-org-user-roles.asciidoc | 6 ++-- .../manage-billing-pricing-model.asciidoc | 6 ++-- .../pages/general/manage-billing.asciidoc | 2 +- serverless/pages/general/manage-billing.mdx | 2 +- serverless/pages/general/manage-org.asciidoc | 6 ++-- serverless/pages/general/sign-up.asciidoc | 8 ++--- serverless/pages/general/sign-up.mdx | 2 +- .../pages/general/visualize-library.asciidoc | 4 +-- .../pages/general/what-is-serverless.asciidoc | 8 ++--- .../pages/general/what-is-serverless.mdx | 6 ++-- .../pages/welcome-to-serverless.asciidoc | 2 +- 14 files changed, 37 insertions(+), 57 deletions(-) delete mode 100644 serverless/pages/general/index.asciidoc diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index 2e2164ac66..c446dd6687 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -14,6 +14,7 @@ include::{asciidoc-dir}/../../shared/attributes.asciidoc[] :obs-badge: <> :sec-badge: <> +// These are only available in serverless :es3: Elasticsearch Serverless :project-settings: Project settings :manage-app: Management @@ -31,6 +32,14 @@ include::{asciidoc-dir}/../../shared/attributes.asciidoc[] :alerts-app: Alerts :serverlessCustomRoles: true +// The values of these attributes are different in stateful vs serverless +:serverless-full: Elastic Cloud Serverless +:serverless-short: Serverless +:es: Elasticsearch Serverless +:es-serverless: Elasticsearch Serverless +:observability: Elastic Observability Serverless +:security: Elastic Security Serverless + = Serverless [[intro]] diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc 
b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc index 20717ed300..808b543f6c 100644 --- a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc +++ b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc @@ -1,5 +1,5 @@ [[elasticsearch-reference-semantic-search-elser]] -= "Tutorial: Semantic search with ELSER" += Tutorial: Semantic search with ELSER :description: Perform semantic search using ELSER, an NLP model trained by Elastic. :keywords: elasticsearch, elser, semantic search diff --git a/serverless/pages/general/index.asciidoc b/serverless/pages/general/index.asciidoc deleted file mode 100644 index b73810d106..0000000000 --- a/serverless/pages/general/index.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -= Welcome to Elastic serverless - -include::./what-is-serverless.asciidoc[leveloffset=+1] - -include::./sign-up.asciidoc[leveloffset=+1] - -include::./manage-org.asciidoc[leveloffset=+1] -include::./manage-access-to-org.asciidoc[leveloffset=+2] -include::./manage-access-to-org-user-roles.asciidoc[leveloffset=+2] -include::./manage-access-to-org-from-existing-account.asciidoc[leveloffset=+2] - -include::./manage-your-project.asciidoc[leveloffset=+1] -include::./manage-your-project-rest-api.asciidoc[leveloffset=+2] - -include::./manage-billing.asciidoc[leveloffset=+1] -include::./manage-billing-check-subscription.asciidoc[leveloffset=+2] -include::./manage-billing-monitor-usage.asciidoc[leveloffset=+2] -include::./manage-billing-history.asciidoc[leveloffset=+2] -include::./manage-billing-pricing-model.asciidoc[leveloffset=+2] -include::./manage-billing-stop-project.asciidoc[leveloffset=+2] - -include::./service-status.asciidoc[leveloffset=+1] - -include::./user-profile.asciidoc[leveloffset=+1] - -include::./cloud-regions.asciidoc[leveloffset=+1] 
diff --git a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc index 7fff99b08d..9c632a7f96 100644 --- a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc +++ b/serverless/pages/general/manage-access-to-org-user-roles.asciidoc @@ -36,9 +36,11 @@ For example, you can assign a user the developer role for a specific {es-serverl [role="screenshot"] image::images/individual-role.png[Individual role] -ifeval::["{serverlessCustomRoles}" == "true"] -You can also optionally https://www.elastic.co/docs/current/serverless/custom-roles[create custom roles in a project]. +ifdef::serverlessCustomRoles[] + +You can also optionally <>. To assign a custom role to users, go to "Instance access roles" and select it from the list under the specific project it was created in. + endif::[] [discrete] diff --git a/serverless/pages/general/manage-billing-pricing-model.asciidoc b/serverless/pages/general/manage-billing-pricing-model.asciidoc index 9ab07fbf51..0da3fd6a08 100644 --- a/serverless/pages/general/manage-billing-pricing-model.asciidoc +++ b/serverless/pages/general/manage-billing-pricing-model.asciidoc @@ -17,9 +17,9 @@ Elastic Cloud serverless billing is based on your usage across these dimensions: To learn about billing dimensions for specific offerings, refer to: -* https://www.elastic.co/docs/current/serverless/elasticsearch/elasticsearch-billing[] -* https://www.elastic.co/docs/current/serverless/observability/observability-billing[] -* https://www.elastic.co/docs/current/serverless/security/security-billing[] +* <> +* <> +* <> [discrete] [[add-ons]] diff --git a/serverless/pages/general/manage-billing.asciidoc b/serverless/pages/general/manage-billing.asciidoc index 44c4685784..295b3e6e6f 100644 --- a/serverless/pages/general/manage-billing.asciidoc +++ b/serverless/pages/general/manage-billing.asciidoc @@ -13,7 +13,7 @@ preview:[] .Serverless billing starts June 1, 2024 
[IMPORTANT] ==== -Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the https://cloud.elastic.co/billing/usage?[Billing Usage page]. Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. +Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the https://cloud.elastic.co/billing/usage[Billing Usage page]. Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. ==== You can manage the billing details of your organization directly from the Elastic Cloud console. diff --git a/serverless/pages/general/manage-billing.mdx b/serverless/pages/general/manage-billing.mdx index 9ab508dd7a..5990f55e7f 100644 --- a/serverless/pages/general/manage-billing.mdx +++ b/serverless/pages/general/manage-billing.mdx @@ -8,7 +8,7 @@ tags: [ 'serverless', 'general', 'billing', 'overview' ] - Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the [Billing Usage page](https://cloud.elastic.co/billing/usage?). Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. + Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the [Billing Usage page](https://cloud.elastic.co/billing/usage). Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. 
You can manage the billing details of your organization directly from the Elastic Cloud console. diff --git a/serverless/pages/general/manage-org.asciidoc b/serverless/pages/general/manage-org.asciidoc index 31da7a3451..792f2607bf 100644 --- a/serverless/pages/general/manage-org.asciidoc +++ b/serverless/pages/general/manage-org.asciidoc @@ -10,6 +10,6 @@ When you sign up to Elastic Cloud, you create an **organization**. This organization is the umbrella for all of your Elastic Cloud resources, users, and account settings. Every organization has a unique identifier. Bills are invoiced according to the billing contact and details that you set for your organization. -* <> -* <> -* <> +* <>: Add members to your organization and projects. +* <>: Configure the billing details of your organization. +* <>: Configure project-wide features and usage. diff --git a/serverless/pages/general/sign-up.asciidoc b/serverless/pages/general/sign-up.asciidoc index f6eec4b81e..a2f9b972f4 100644 --- a/serverless/pages/general/sign-up.asciidoc +++ b/serverless/pages/general/sign-up.asciidoc @@ -24,9 +24,9 @@ Each deployment includes Elastic features such as Maps, SIEM, machine learning, Serverless projects package Elastic Stack features by type of solution: -* https://www.elastic.co/docs/current/serverless/elasticsearch/what-is-elasticsearch-serverless[Elasticsearch] -* https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[Observability] -* https://www.elastic.co/docs/current/serverless/security/what-is-security-serverless[Security] +* <> +* <> +* <> When you create a project, you select the project type applicable to your use case, so only the relevant and impactful applications and features are easily accessible to you. @@ -63,7 +63,7 @@ To remove limitations, subscribe to https://www.elastic.co/guide/en/cloud/curren * Third availability zone for your deployments. 
* Access to additional features, such as cross-cluster search and cross-cluster replication. -You can subscribe to Elastic Cloud at any time during your trial. /serverless/general/serverless-billing[Billing] starts when you subscribe. To maximize the benefits of your trial, subscribe at the end of the free period. To monitor charges, anticipate future costs, and adjust your usage, check your https://www.elastic.co/guide/en/cloud/current/ec-account-usage.html[account usage] and https://www.elastic.co/guide/en/cloud/current/ec-billing-history.html[billing history]. +You can subscribe to Elastic Cloud at any time during your trial. <> starts when you subscribe. To maximize the benefits of your trial, subscribe at the end of the free period. To monitor charges, anticipate future costs, and adjust your usage, check your https://www.elastic.co/guide/en/cloud/current/ec-account-usage.html[account usage] and https://www.elastic.co/guide/en/cloud/current/ec-billing-history.html[billing history]. [discrete] [[general-sign-up-trial-how-do-i-get-started-with-my-trial]] diff --git a/serverless/pages/general/sign-up.mdx b/serverless/pages/general/sign-up.mdx index a539af235b..eb4aa29e06 100644 --- a/serverless/pages/general/sign-up.mdx +++ b/serverless/pages/general/sign-up.mdx @@ -60,7 +60,7 @@ To remove limitations, subscribe to [Elastic Cloud](https://www.elastic.co/guide - Third availability zone for your deployments. - Access to additional features, such as cross-cluster search and cross-cluster replication. -You can subscribe to Elastic Cloud at any time during your trial. [Billing](/serverless/general/serverless-billing) starts when you subscribe. To maximize the benefits of your trial, subscribe at the end of the free period. 
To monitor charges, anticipate future costs, and adjust your usage, check your [account usage](https://www.elastic.co/guide/en/cloud/current/ec-account-usage.html) and [billing history](https://www.elastic.co/guide/en/cloud/current/ec-billing-history.html). +You can subscribe to Elastic Cloud at any time during your trial. Billing starts when you subscribe. To maximize the benefits of your trial, subscribe at the end of the free period. To monitor charges, anticipate future costs, and adjust your usage, check your [account usage](https://www.elastic.co/guide/en/cloud/current/ec-account-usage.html) and [billing history](https://www.elastic.co/guide/en/cloud/current/ec-billing-history.html). ## How do I get started with my trial? diff --git a/serverless/pages/general/visualize-library.asciidoc b/serverless/pages/general/visualize-library.asciidoc index afbb9bef23..8131c37e5a 100644 --- a/serverless/pages/general/visualize-library.asciidoc +++ b/serverless/pages/general/visualize-library.asciidoc @@ -8,7 +8,7 @@ This content has been removed from the navigation for now because it's not useful in its current state.*/ //// -This content applies to: +This content applies to: {es-badge} {obs-badge} {sec-badge} The **Visualize Library** is a space where you can save visualization panels that you may want to use across multiple dashboards. The **Visualize Library** consists of two pages: @@ -19,7 +19,7 @@ The **Visualize Library** is a space where you can save visualization panels tha [[visualize-library-visualizations]] == Visualizations -By default the **Visualizations** page opens first. Here you can create new visualizations, or select from a list of previously created visualizations. To learn more, refer to https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-visualizations[save to the Visualize Library]. +By default the **Visualizations** page opens first. 
Here you can create new visualizations, or select from a list of previously created visualizations. To learn more, refer to <>. [discrete] [[visualize-library-annotation-groups]] diff --git a/serverless/pages/general/what-is-serverless.asciidoc b/serverless/pages/general/what-is-serverless.asciidoc index 7e2331c68d..59e0be7db2 100644 --- a/serverless/pages/general/what-is-serverless.asciidoc +++ b/serverless/pages/general/what-is-serverless.asciidoc @@ -15,7 +15,7 @@ data you ingest, more resources are allocated for that period of time. When the on your end. **Optimized data storage.** Your data is stored in cost-efficient, general storage. A cache layer is available on top of the general storage for recent and frequently queried data that provides faster search speed. -The size of the cache layer and the volume of data it holds depend on https://www.elastic.co/docs/current/serverless/elasticsearch/manage-project[settings] that you can configure for each project. +The size of the cache layer and the volume of data it holds depend on <> that you can configure for each project. **Dedicated experiences.** All serverless solutions are built on the Elastic Search Platform and include the core capabilities of the Elastic Stack. They also each offer a distinct experience and specific capabilities that help you focus on your data, goals, and use cases. @@ -24,7 +24,7 @@ The size of the cache layer and the volume of data it holds depend on https://ww .Serverless billing starts June 1, 2024 [IMPORTANT] ==== -Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the https://cloud.elastic.co/billing/usage?[Billing Usage page]. Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. 
+Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the https://cloud.elastic.co/billing/usage[Billing Usage page]. Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. ==== [discrete] @@ -36,7 +36,7 @@ Control your project data and query performance against your project data. **Data.** Choose the data you want to ingest, and the method to ingest it. By default, data is stored indefinitely in your project, and you define the retention settings for your data streams. -**Performance.** For granular control over costs and query performance against your project data, serverless projects come with a set of predefined https://www.elastic.co/docs/current/serverless/elasticsearch/manage-project[settings] that you can edit. +**Performance.** For granular control over costs and query performance against your project data, serverless projects come with a set of predefined <> that you can edit. .Some or all of these settings may not be available for all types of serverless projects. [NOTE] @@ -108,7 +108,7 @@ Migration paths between hosted deployments and serverless projects are currently **How can I move data to or from serverless projects?** -We are working on data migration tools! In the interim, you can https://www.elastic.co/docs/current/serverless/elasticsearch/ingest-data-through-logstash[use Logstash] with Elasticsearch input and output plugins to move data to and from serverless projects. +We are working on data migration tools! In the interim, you can <> with Elasticsearch input and output plugins to move data to and from serverless projects. 
**How does serverless ensure compatibility between software versions?** diff --git a/serverless/pages/general/what-is-serverless.mdx b/serverless/pages/general/what-is-serverless.mdx index 332bd8818a..ca804e52ac 100644 --- a/serverless/pages/general/what-is-serverless.mdx +++ b/serverless/pages/general/what-is-serverless.mdx @@ -23,7 +23,7 @@ The size of the cache layer and the volume of data it holds depend on - Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the [Billing Usage page](https://cloud.elastic.co/billing/usage?). Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. + Until May 31, 2024, your serverless consumption will not incur any charges, but will be visible along with your total Elastic Cloud consumption on the [Billing Usage page](https://cloud.elastic.co/billing/usage). Unless you are in a trial period, usage on or after June 1, 2024 will be deducted from your existing Elastic Cloud credits or be billed to your active payment method. ## Control your data and performance @@ -66,7 +66,7 @@ Migration paths between hosted deployments and serverless projects are currently **How can I move data to or from serverless projects?** -We are working on data migration tools! In the interim, you can use Logstash with Elasticsearch input and output plugins to move data to and from serverless projects. +We are working on data migration tools! In the interim, you can use Logstash with Elasticsearch input and output plugins to move data to and from serverless projects. 
**How does serverless ensure compatibility between software versions?** @@ -92,4 +92,4 @@ See serverless pricing information for [Search](https://www.elastic.co/pricing/s **Can I request backups or restores for my projects?** -It is not currently possible to request backups or restores for projects, but we are working on data migration tools to better support this. +It is not currently possible to request backups or restores for projects, but we are working on data migration tools to better support this. diff --git a/serverless/pages/welcome-to-serverless.asciidoc b/serverless/pages/welcome-to-serverless.asciidoc index a407e0c041..4c9314753f 100644 --- a/serverless/pages/welcome-to-serverless.asciidoc +++ b/serverless/pages/welcome-to-serverless.asciidoc @@ -10,7 +10,7 @@ ++++ -preview::[] +preview:[] Elastic serverless products allow you to deploy and use Elastic for your use cases without managing the underlying Elastic cluster, such as nodes, data tiers, and scaling. Serverless instances are fully-managed, autoscaled, and automatically upgraded by Elastic so you can From 6738c0871a774baa76cb517e5af00456a0fc5fba Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Wed, 30 Oct 2024 14:16:38 -0500 Subject: [PATCH 12/25] qa elasticsearch --- .../apis-elasticsearch-conventions.asciidoc | 2 +- .../elasticsearch/apis-http-apis.asciidoc | 4 +- .../apis-kibana-conventions.asciidoc | 4 +- .../clients-go-getting-started.asciidoc | 22 ++--- .../clients-go-getting-started.mdx | 22 ++--- .../clients-ruby-getting-started.asciidoc | 19 ++-- .../clients-ruby-getting-started.mdx | 1 - .../elasticsearch-developer-tools.asciidoc | 10 +- .../elasticsearch-developer-tools.mdx | 1 - .../explore-your-data-alerting.asciidoc | 4 +- ...lore-your-data-discover-your-data.asciidoc | 26 +++--- .../explore-your-data-discover-your-data.mdx | 93 +++++++++---------- .../pages/elasticsearch/get-started.asciidoc | 2 +- serverless/pages/elasticsearch/index.asciidoc | 53 ----------- 
...ata-through-integrations-logstash.asciidoc | 4 +- .../pages/elasticsearch/knn-search.asciidoc | 4 +- serverless/pages/elasticsearch/knn-search.mdx | 4 +- .../search-with-synonyms.asciidoc | 2 +- ...h-your-data-semantic-search-elser.asciidoc | 4 +- ...search-your-data-semantic-search-elser.mdx | 4 +- .../search-your-data-semantic-search.asciidoc | 2 + .../serverless-differences.asciidoc | 2 +- 22 files changed, 114 insertions(+), 175 deletions(-) delete mode 100644 serverless/pages/elasticsearch/index.asciidoc diff --git a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc index 27b5167fc7..ec204ca246 100644 --- a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc +++ b/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc @@ -14,7 +14,7 @@ For example: GET _cat/indices?v=true ---- -Check out https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[]. +Check out <>. [discrete] [[elasticsearch-api-conventions-request-headers]] diff --git a/serverless/pages/elasticsearch/apis-http-apis.asciidoc b/serverless/pages/elasticsearch/apis-http-apis.asciidoc index 61628e0016..5d7feb08e7 100644 --- a/serverless/pages/elasticsearch/apis-http-apis.asciidoc +++ b/serverless/pages/elasticsearch/apis-http-apis.asciidoc @@ -6,6 +6,6 @@ preview:[] -* <> -* <> +* <>: The {es} REST APIs have conventions for headers and request bodies. +* <>: The Management APIs for {serverless-short} have request header conventions. 
* https://www.elastic.co/docs/api/[API Reference]: Explore the reference information for Elastic Serverless REST APIs diff --git a/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc b/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc index 67e49c4a81..a07af9335b 100644 --- a/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc +++ b/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc @@ -29,7 +29,7 @@ For example: GET kbn:/api/data_views ---- -Check out https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[]. +Check out <>. [discrete] [[elasticsearch-kibana-api-conventions-request-headers]] @@ -42,7 +42,7 @@ The Management APIs support the `Authorization`, `Content-Type`, and `kbn-xsrf` Management APIs use key-based authentication. You must create an API key and use the encoded value in the request header. -To learn about creating keys, go to https://www.elastic.co/docs/current/serverless/api-keys[]. +To learn about creating keys, go to <>. 
`Content-Type: application/json`:: diff --git a/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc index b5d80b40ae..ac807bcad9 100644 --- a/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc @@ -41,15 +41,15 @@ The following snippets use these imports: [source,go] ---- import ( - "context" - "encoding/json" - "fmt" - "log" - "strconv" - - "github.com/elastic/elasticsearch-serverless-go" - "github.com/elastic/elasticsearch-serverless-go/typedapi/types" - "github.com/elastic/elasticsearch-serverless-go/typedapi/types/enums/result" + "context" + "encoding/json" + "fmt" + "log" + "strconv" + + "github.com/elastic/elasticsearch-serverless-go" + "github.com/elastic/elasticsearch-serverless-go/typedapi/types" + "github.com/elastic/elasticsearch-serverless-go/typedapi/types/enums/result" ) ---- @@ -62,8 +62,8 @@ Initialize the client using your API key and Elasticsearch Endpoint: [source,go] ---- client, err := elasticsearch.NewClient(elasticsearch.Config{ - APIKey: "you_api_key", - Address: "https://my-project-url", + APIKey: "your_api_key", + Address: "https://my-project-url", }) if err != nil { log.Fatal(err) diff --git a/serverless/pages/elasticsearch/clients-go-getting-started.mdx b/serverless/pages/elasticsearch/clients-go-getting-started.mdx index b1bd84674a..f7c009857d 100644 --- a/serverless/pages/elasticsearch/clients-go-getting-started.mdx +++ b/serverless/pages/elasticsearch/clients-go-getting-started.mdx @@ -35,15 +35,15 @@ The following snippets use these imports: ```go import ( - "context" - "encoding/json" - "fmt" - "log" - "strconv" - - "github.com/elastic/elasticsearch-serverless-go" - "github.com/elastic/elasticsearch-serverless-go/typedapi/types" - "github.com/elastic/elasticsearch-serverless-go/typedapi/types/enums/result" + "context" + "encoding/json" + "fmt" + "log" + "strconv" + 
+ "github.com/elastic/elasticsearch-serverless-go" + "github.com/elastic/elasticsearch-serverless-go/typedapi/types" + "github.com/elastic/elasticsearch-serverless-go/typedapi/types/enums/result" ) ``` @@ -54,8 +54,8 @@ Initialize the client using your API key and Elasticsearch Endpoint: ```go client, err := elasticsearch.NewClient(elasticsearch.Config{ - APIKey: "you_api_key", - Address: "https://my-project-url", + APIKey: "your_api_key", + Address: "https://my-project-url", }) if err != nil { log.Fatal(err) diff --git a/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc b/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc index 0ac75bb872..8fccb0ad2d 100644 --- a/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc +++ b/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc @@ -16,7 +16,6 @@ client for {es3}, shows you how to initialize the client, and how to perform bas * Ruby 3.0 or higher installed on your system. * To use the `elasticsearch-serverless` gem, you must have an API key and Elasticsearch Endpoint for an {es3} project. 
-* [discrete] [[elasticsearch-ruby-client-getting-started-installation]] @@ -126,7 +125,7 @@ index: [source,ruby] ---- # First, build your data: -\> body = [ +> body = [ { index: { _index: 'books', data: {name: "Snow Crash", author: "Neal Stephenson", release_date: "1992-06-01", page_count: 470} } }, { index: { _index: 'books', data: {name: "Revelation Space", author: "Alastair Reynolds", release_date: "2000-03-15", page_count: 585} } }, { index: { _index: 'books', data: {name: "1984", author: "George Orwell", release_date: "1949-06-08", page_count: 328} } }, @@ -135,9 +134,9 @@ index: { index: { _index: 'books', data: {name: "The Handmaid's Tale", author: "Margaret Atwood", release_date: "1985-06-01", page_count: 311} } } ] # Then ingest the data via the bulk API: -\> response = client.bulk(body: body) +> response = client.bulk(body: body) # You can check the response if the items are indexed and have a document (doc) ID: -\> response['items'] +> response['items'] # Returns: # => # [{"index"=>{"_index"=>"books", "_id"=>"Pdink4cBmDx329iqhzM2", "_version"=>1, "result"=>"created", "_shards"=>{"total"=>2, "successful"=>1, "failed"=>0}, "_seq_no"=>0, "_primary_term"=>1, "status"=>201}}, @@ -162,7 +161,7 @@ You can get documents by using the following code: [source,ruby] ---- -\> client.get(index: 'books', id: 'id') # Replace 'id' with a valid doc ID +> client.get(index: 'books', id: 'id') # Replace 'id' with a valid doc ID ---- [discrete] @@ -174,8 +173,8 @@ Now that some data is available, you can search your documents using the [source,ruby] ---- -\> response = client.search(index: 'books', q: 'snow') -\> response['hits']['hits'] +> response = client.search(index: 'books', q: 'snow') +> response['hits']['hits'] # Returns: # => [{"_index"=>"books", "_id"=>"Pdink4cBmDx329iqhzM2", "_score"=>1.5904956, "_source"=>{"name"=>"Snow Crash", "author"=>"Neal Stephenson", "release_date"=>"1992-06-01", "page_count"=>470}}] ---- @@ -188,7 +187,7 @@ You can call the `update` API to 
update a document: [source,ruby] ---- -\> response = client.update( +> response = client.update( index: 'books', id: 'id', # Replace 'id' with a valid doc ID body: { doc: { page_count: 312 } } @@ -203,7 +202,7 @@ You can call the `delete` API to delete a document: [source,ruby] ---- -\> client.delete(index: 'books', id: 'id') # Replace 'id' with a valid doc ID +> client.delete(index: 'books', id: 'id') # Replace 'id' with a valid doc ID ---- [discrete] @@ -212,5 +211,5 @@ You can call the `delete` API to delete a document: [source,ruby] ---- -\> client.indices.delete(index: 'books') +> client.indices.delete(index: 'books') ---- diff --git a/serverless/pages/elasticsearch/clients-ruby-getting-started.mdx b/serverless/pages/elasticsearch/clients-ruby-getting-started.mdx index cffd03381e..fc14b07d5d 100644 --- a/serverless/pages/elasticsearch/clients-ruby-getting-started.mdx +++ b/serverless/pages/elasticsearch/clients-ruby-getting-started.mdx @@ -15,7 +15,6 @@ client for ((es3)), shows you how to initialize the client, and how to perform b * Ruby 3.0 or higher installed on your system. * To use the `elasticsearch-serverless` gem, you must have an API key and Elasticsearch Endpoint for an ((es3)) project. -* ## Installation diff --git a/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc b/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc index d7fb3d77a0..41b5277626 100644 --- a/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc +++ b/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc @@ -6,15 +6,11 @@ preview:[] -[discrete] -[[elasticsearch-dev-tools-developer-tools]] -== Developer tools - A number of developer tools are available in your project's UI under the **Dev Tools** section. -* https://www.elastic.co/docs/current/serverless/devtools/run-api-requests-in-the-console[Console]: Make API calls to your Elasticsearch instance using the Query DSL and view the responses. 
-* https://www.elastic.co/docs/current/serverless/devtools/profile-queries-and-aggregations[Search Profiler]: Inspect and analyze your search queries to identify performance bottlenecks. -* https://www.elastic.co/docs/current/serverless/devtools/debug-grok-expressions[Grok Debugger]: Build and debug grok patterns before you use them in your data processing pipelines. +* <>: Make API calls to your Elasticsearch instance using the Query DSL and view the responses. +* <>: Inspect and analyze your search queries to identify performance bottlenecks. +* <>: Build and debug grok patterns before you use them in your data processing pipelines. // ## Troubleshooting diff --git a/serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx b/serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx index 2c58bf304d..929e0dcea8 100644 --- a/serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx +++ b/serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx @@ -6,7 +6,6 @@ tags: [ 'serverless', 'elasticsearch', 'overview' ] --- -## Developer tools A number of developer tools are available in your project's UI under the **Dev Tools** section. diff --git a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc index c96e30cf4f..c055cef4b7 100644 --- a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc @@ -20,7 +20,7 @@ In **{alerts-app}** or **{project-settings} → {manage-app} → {rules-app}** y [role="screenshot"] image::images/rules-ui.png[Example rule listing in {rules-ui}] -For an overview of alerting concepts, go to https://www.elastic.co/docs/current/serverless/rules[]. +For an overview of alerting concepts, go to <>. 
//// /* ## Required permissions @@ -80,7 +80,7 @@ When a rule is in a snoozed state, you can cancel or change the duration of this [[elasticsearch-explore-your-data-alerting-import-and-export-rules]] == Import and export rules -To import and export rules, use https://www.elastic.co/docs/current/serverless/saved-objects[saved objects]. +To import and export rules, use <>. //// /* diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc index 38d7b47353..6422146fe1 100644 --- a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc @@ -36,29 +36,29 @@ Tell {kib} where to find the data you want to explore, and then specify the time . Once the book sample data has been ingested, navigate to **Explore → Discover** and click **Create data view**. . Give your data view a name. - ++ [role="screenshot"] image::images/create-data-view.png[Create a data view] - ++ . Start typing in the **Index pattern** field, and the names of indices, data streams, and aliases that match your input will be displayed. - ++ * To match multiple sources, use a wildcard (*), for example, `b*` and any indices starting with the letter `b` display. * To match multiple sources, enter their names separated by a comma. Do not include a space after the comma. For example `books,magazines` would match two indices: `books` and `magazines`. * To exclude a source, use a minus sign (-), for example `-books`. - ++ . In the **Timestamp** field dropdown, and then select `release_date`. - ++ * If you don't set a time field, you can't use global time filters on your dashboards. Leaving the time field unset might be useful if you have multiple time fields and want to create dashboards that combine visualizations based on different timestamps. 
* If your index doesn't have time-based data, choose **I don't want to use the time filter**. - ++ . Click **Show advanced settings** to: - ++ * Display hidden and system indices. * Specify your own data view name. For example, enter your Elasticsearch index alias name. - ++ . Click **Save data view to {kib}**. . Adjust the time range to view data for the **Last 40 years** to view all your book data. - ++ [role="screenshot"] image::images/book-data.png[Your book data displayed] @@ -70,9 +70,9 @@ image::images/book-data.png[Your book data displayed] . In the sidebar, enter `au` in the search field to find the `author` field. . In the **Available fields** list, click `author` to view its most popular values. - ++ **Discover** shows the top 10 values and the number of records used to calculate those values. - ++ . Click image:images/icons/plusInCircleFilled.svg[Add] to toggle the field into the document table. You can also drag the field from the **Available fields** list into the document table. [discrete] @@ -169,12 +169,12 @@ If a field can be {ref}/search-aggregations.html[aggregated], you can quickly vi . In the sidebar, find and then click `release_date`. . In the popup, click **Visualize**. - ++ [NOTE] ==== {kib} creates a visualization best suited for this field. ==== - ++ . From the **Available fields** list, drag and drop `page_count` onto the workspace. . Save your visualization for use on a dashboard. diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx index 8fe1a34a3c..428051ca15 100644 --- a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx +++ b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx @@ -32,34 +32,31 @@ Tell ((kib)) where to find the data you want to explore, and then specify the ti 1. 
Once the book sample data has been ingested, navigate to **Explore → Discover** and click **Create data view**. -2. Give your data view a name. +1. Give your data view a name. - + -3. Start typing in the **Index pattern** field, and the names of indices, data streams, and aliases that match your input will be displayed. +1. Start typing in the **Index pattern** field, and the names of indices, data streams, and aliases that match your input will be displayed. -- To match multiple sources, use a wildcard (*), for example, `b*` and any indices starting with the letter `b` display. + - To match multiple sources, use a wildcard (*), for example, `b*` and any indices starting with the letter `b` display. + - To match multiple sources, enter their names separated by a comma. Do not include a space after the comma. For example `books,magazines` would match two indices: `books` and `magazines`. + - To exclude a source, use a minus sign (-), for example `-books`. -- To match multiple sources, enter their names separated by a comma. Do not include a space after the comma. For example `books,magazines` would match two indices: `books` and `magazines`. +1. In the **Timestamp** field dropdown, and then select `release_date`. -- To exclude a source, use a minus sign (-), for example `-books`. + - If you don't set a time field, you can't use global time filters on your dashboards. Leaving the time field unset might be useful if you have multiple time fields and want to create dashboards that combine visualizations based on different timestamps. + - If your index doesn't have time-based data, choose **I don't want to use the time filter**. -4. In the **Timestamp** field dropdown, and then select `release_date`. +1. Click **Show advanced settings** to: -- If you don't set a time field, you can't use global time filters on your dashboards. 
Leaving the time field unset might be useful if you have multiple time fields and want to create dashboards that combine visualizations based on different timestamps. + - Display hidden and system indices. + - Specify your own data view name. For example, enter your Elasticsearch index alias name. -- If your index doesn't have time-based data, choose **I don't want to use the time filter**. +1. Click **Save data view to ((kib))**. -5. Click **Show advanced settings** to: +1. Adjust the time range to view data for the **Last 40 years** to view all your book data. -- Display hidden and system indices. -- Specify your own data view name. For example, enter your Elasticsearch index alias name. - -6. Click **Save data view to ((kib))**. - -7. Adjust the time range to view data for the **Last 40 years** to view all your book data. - - +


@@ -69,11 +66,11 @@ Tell ((kib)) where to find the data you want to explore, and then specify the ti 1. In the sidebar, enter `au` in the search field to find the `author` field. -2. In the **Available fields** list, click `author` to view its most popular values. +1. In the **Available fields** list, click `author` to view its most popular values. -**Discover** shows the top 10 values and the number of records used to calculate those values. + **Discover** shows the top 10 values and the number of records used to calculate those values. -3. Click to toggle the field into the document table. You can also drag the field from the **Available fields** list into the document table. +1. Click to toggle the field into the document table. You can also drag the field from the **Available fields** list into the document table. ## Add a field to your ((data-source)) @@ -85,21 +82,21 @@ the same way you do with other fields. 1. In the sidebar, click **Add a field**. -2. In the **Create field** form, enter `hello` for the name. +1. In the **Create field** form, enter `hello` for the name. -3. Turn on **Set value**. +1. Turn on **Set value**. -4. Define the script using the Painless scripting language. Runtime fields require an `emit()`. +1. Define the script using the Painless scripting language. Runtime fields require an `emit()`. ```ts emit("Hello World!"); ``` -5. Click **Save**. +1. Click **Save**. -6. In the sidebar, search for the **hello** field, and then add it to the document table. +1. In the sidebar, search for the **hello** field, and then add it to the document table. -7. Create a second field named `authorabbrev` that combines the authors last name and first initial. +1. Create a second field named `authorabbrev` that combines the author's last name and first initial. ```ts String str = doc['author.keyword'].value; @@ -107,7 +104,7 @@ the same way you do with other fields. emit(doc['author.keyword'].value + ", " + ch1); ``` -8. 
Add `authorabbrev` to the document table. +1. Add `authorabbrev` to the document table. @@ -122,7 +119,7 @@ To search particular fields and build more complex queries, use the ((kib)) Quer Search the book data to find out which books have more than 500 pages: 1. Enter `p`, and then select **page_count**. -2. Select **>** for greater than and enter **500**, then click the refresh button or press the Enter key to see which books have more than 500 pages. +1. Select **>** for greater than and enter **500**, then click the refresh button or press the Enter key to see which books have more than 500 pages.
@@ -136,10 +133,10 @@ and more. Exclude documents where the author is not Terry Pratchett: 1. Click next to the query bar. -2. In the **Add filter** pop-up, set the field to **author**, the operator to **is not**, and the value to **Terry Pratchett**. -3. Click **Add filter**. -4. Continue your exploration by adding more filters. -5. To remove a filter, click the close icon (x) next to its name in the filter bar. +1. In the **Add filter** pop-up, set the field to **author**, the operator to **is not**, and the value to **Terry Pratchett**. +1. Click **Add filter**. +1. Continue your exploration by adding more filters. +1. To remove a filter, click the close icon (x) next to its name in the filter bar.
@@ -149,11 +146,11 @@ Dive into an individual document to view its fields and the documents that occur 1. In the document table, click the expand icon to show document details. -2. Scan through the fields and their values. If you find a field of interest, hover your mouse over the **Actions** column for filters and other options. +1. Scan through the fields and their values. If you find a field of interest, hover your mouse over the **Actions** column for filters and other options. -3. To create a view of the document that you can bookmark and share, click **Single document**. +1. To create a view of the document that you can bookmark and share, click **Single document**. -4. To view documents that occurred before or after the event you are looking at, click **Surrounding documents**. +1. To view documents that occurred before or after the event you are looking at, click **Surrounding documents**. @@ -163,26 +160,26 @@ Save your search so you can use it later to generate a CSV report, create visual 1. In the upper right toolbar, click **Save**. -2. Give your search a title. +1. Give your search a title. -3. Optionally store tags and the time range with the search. +1. Optionally store tags and the time range with the search. -4. Click **Save**. +1. Click **Save**. ## Visualize your findings If a field can be [aggregated](((ref))/search-aggregations.html), you can quickly visualize it from **Discover**. 1. In the sidebar, find and then click `release_date`. -2. In the popup, click **Visualize**. +1. In the popup, click **Visualize**. - - ((kib)) creates a visualization best suited for this field. - + + ((kib)) creates a visualization best suited for this field. + -3. From the **Available fields** list, drag and drop `page_count` onto the workspace. +1. From the **Available fields** list, drag and drop `page_count` onto the workspace. -4. Save your visualization for use on a dashboard. +1. Save your visualization for use on a dashboard. 
For geographical point fields, if you click **Visualize**, your data appears in a map. @@ -201,12 +198,12 @@ From **Discover**, you can create a rule to periodically check when data goes ab 1. Ensure that your data view, query, and filters fetch the data for which you want an alert. -2. In the toolbar, click **Alerts → Create search threshold rule**. +1. In the toolbar, click **Alerts → Create search threshold rule**. The **Create rule** form is pre-filled with the latest query sent to ((es)). -3. Configure your ((es)) query and select a connector type. +1. Configure your ((es)) query and select a connector type. -4. Click **Save**. +1. Click **Save**. For more about this and other rules provided in ((alert-features)), go to Alerting. diff --git a/serverless/pages/elasticsearch/get-started.asciidoc b/serverless/pages/elasticsearch/get-started.asciidoc index 2dded24f26..6b176b53a1 100644 --- a/serverless/pages/elasticsearch/get-started.asciidoc +++ b/serverless/pages/elasticsearch/get-started.asciidoc @@ -22,7 +22,7 @@ Use your {ecloud} account to create a fully-managed {es} project: + ** **General purpose**. For general search use cases across various data types. ** **Optimized for Vectors**. For search use cases using vectors and near real-time retrieval. -. Provide a name for the project and optionally edit the project settings, such as the cloud platform https://www.elastic.co/docs/current/serverless/regions[region]. +. Provide a name for the project and optionally edit the project settings, such as the cloud platform <>. Select **Create project** to continue. . Once the project is ready, select **Continue**. 
diff --git a/serverless/pages/elasticsearch/index.asciidoc b/serverless/pages/elasticsearch/index.asciidoc deleted file mode 100644 index cc4100c405..0000000000 --- a/serverless/pages/elasticsearch/index.asciidoc +++ /dev/null @@ -1,53 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -= Elasticsearch - -include::./what-is-elasticsearch-serverless.asciidoc[leveloffset=+1] - -include::./pricing.asciidoc[leveloffset=+1] - -include::./get-started.asciidoc[leveloffset=+1] - -include::./clients.asciidoc[leveloffset=+1] -include::./clients-go-getting-started.asciidoc[leveloffset=+2] -include::./clients-java-getting-started.asciidoc[leveloffset=+2] -include::./clients-dot-net-getting-started.asciidoc[leveloffset=+2] -include::./clients-nodejs-getting-started.asciidoc[leveloffset=+2] -include::./clients-php-getting-started.asciidoc[leveloffset=+2] -include::./clients-python-getting-started.asciidoc[leveloffset=+2] -include::./clients-ruby-getting-started.asciidoc[leveloffset=+2] - -include::./apis-http-apis.asciidoc[leveloffset=+1] -include::./apis-elasticsearch-conventions.asciidoc[leveloffset=+2] -include::./apis-kibana-conventions.asciidoc[leveloffset=+2] - -include::./elasticsearch-developer-tools.asciidoc[leveloffset=+1] - -include::./ingest-your-data.asciidoc[leveloffset=+1] -include::./ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+2] -include::./ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+2] -include::./ingest-your-data-upload-file.asciidoc[leveloffset=+2] -include::./ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+2] -include::./ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+2] - -include::./search-your-data.asciidoc[leveloffset=+1] -include::./search-your-data-the-search-api.asciidoc[leveloffset=+2] 
-include::./search-with-synonyms.asciidoc[leveloffset=+2] -include::./knn-search.asciidoc[leveloffset=+2] -include::./search-your-data-semantic-search.asciidoc[leveloffset=+2] -include::./search-your-data-semantic-search-elser.asciidoc[leveloffset=+3] - -include::./explore-your-data.asciidoc[leveloffset=+1] -include::./explore-your-data-the-aggregations-api.asciidoc[leveloffset=+2] -include::./explore-your-data-discover-your-data.asciidoc[leveloffset=+2] -include::./explore-your-data-visualize-your-data.asciidoc[leveloffset=+2] -include::./explore-your-data-alerting.asciidoc[leveloffset=+2] - -include::./search-playground.asciidoc[leveloffset=+1] - -include::./serverless-differences.asciidoc[leveloffset=+1] - -include::./technical-preview-limitations.asciidoc[leveloffset=+1] diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc index 5467de5e91..c4d644cd37 100644 --- a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc +++ b/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc @@ -28,7 +28,7 @@ Some differences to note between {es3} and self-managed {es}: User-based security settings are ignored and may cause errors. * {es3} uses **{dlm} ({dlm-init})** instead of {ilm} ({ilm-init}). If you add {ilm-init} settings to your {es} output configuration, they are ignored and may cause errors. -* **{ls} monitoring** for {serverless-short} is available through the https://github.com/elastic/integrations/blob/main/packages/logstash/_dev/build/docs/README.md[{ls} Integration] in https://www.elastic.co/docs/current/serverless/observability/what-is-observability-serverless[Elastic Observability]. 
+* **{ls} monitoring** for {serverless-short} is available through the https://github.com/elastic/integrations/blob/main/packages/logstash/_dev/build/docs/README.md[{ls} Integration] in <>. **Known issue** @@ -61,7 +61,7 @@ No additional SSL configuration steps are needed. [[elasticsearch-ingest-data-through-logstash-api-keys-for-connecting-ls-to-es3]] == API keys for connecting {ls} to {es3} -Use the **Security: API key** section in the UI to https://www.elastic.co/docs/current/serverless/api-keys[create an API key] +Use the **Security: API key** section in the UI to <> for securely connecting the {ls} {es} output to {es3}. We recommend creating a unique API key per {ls} instance. You can create as many API keys as necessary. diff --git a/serverless/pages/elasticsearch/knn-search.asciidoc b/serverless/pages/elasticsearch/knn-search.asciidoc index acc5e00ebe..587035ecb9 100644 --- a/serverless/pages/elasticsearch/knn-search.asciidoc +++ b/serverless/pages/elasticsearch/knn-search.asciidoc @@ -356,7 +356,7 @@ shards. The score of each hit is the sum of the `knn` and `query` scores. You can specify a `boost` value to give a weight to each score in the sum. In the example above, the scores will be calculated as -[source] +[source,txt] ---- score = 0.9 * match_score + 0.1 * knn_score ---- @@ -480,7 +480,7 @@ all index shards. The scoring for a doc with the above configured boosts would be: -[source] +[source,txt] ---- score = 0.9 * match_score + 0.1 * knn_score_image-vector + 0.5 * knn_score_title-vector ---- diff --git a/serverless/pages/elasticsearch/knn-search.mdx b/serverless/pages/elasticsearch/knn-search.mdx index 96282a3a2c..8c6990021d 100644 --- a/serverless/pages/elasticsearch/knn-search.mdx +++ b/serverless/pages/elasticsearch/knn-search.mdx @@ -331,7 +331,7 @@ shards. The score of each hit is the sum of the `knn` and `query` scores. You can specify a `boost` value to give a weight to each score in the sum. 
In the example above, the scores will be calculated as -``` +```txt score = 0.9 * match_score + 0.1 * knn_score ``` @@ -446,7 +446,7 @@ all index shards. The scoring for a doc with the above configured boosts would be: -``` +```txt score = 0.9 * match_score + 0.1 * knn_score_image-vector + 0.5 * knn_score_title-vector ``` diff --git a/serverless/pages/elasticsearch/search-with-synonyms.asciidoc b/serverless/pages/elasticsearch/search-with-synonyms.asciidoc index 7d5d022065..fd459c62a2 100644 --- a/serverless/pages/elasticsearch/search-with-synonyms.asciidoc +++ b/serverless/pages/elasticsearch/search-with-synonyms.asciidoc @@ -70,7 +70,7 @@ Check each synonym token filter documentation for configuration details and inst === Test your analyzer You can test an analyzer configuration without modifying your index settings. -Use the ((ref)/indices-analyze.html)[analyze API] to test your analyzer chain: +Use the {ref}/indices-analyze.html[analyze API] to test your analyzer chain: [source,bash] ---- diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc index 808b543f6c..186ff0242c 100644 --- a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc +++ b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc @@ -325,11 +325,11 @@ search results. [discrete] [[optimization]] -= Optimizing performance +== Optimizing performance [discrete] [[save-space]] -== Saving disk space by excluding the ELSER tokens from document source +=== Saving disk space by excluding the ELSER tokens from document source The tokens generated by ELSER must be indexed for use in the {ref}/query-dsl-sparse-vector-query.html[sparse_vector query]. 
However, it is not diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx index a6699e74ee..b2e95859fc 100644 --- a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx +++ b/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx @@ -309,11 +309,11 @@ search results.
-# Optimizing performance +## Optimizing performance
-## Saving disk space by excluding the ELSER tokens from document source +### Saving disk space by excluding the ELSER tokens from document source The tokens generated by ELSER must be indexed for use in the [sparse_vector query](((ref))/query-dsl-sparse-vector-query.html). However, it is not diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc b/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc index d64f9fa3fa..271fc64081 100644 --- a/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc +++ b/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc @@ -16,6 +16,8 @@ enables it to extract text embeddings out of text. Embeddings are vectors that provide a numeric representation of a text. Pieces of content with similar meaning have similar representations. +image::images/vector-search-oversimplification.png[A simplified representation of encoding textual concepts as vectors] + _A simplified representation of encoding textual concepts as vectors_ At query time, {es} can use the same NLP model to convert a query into diff --git a/serverless/pages/elasticsearch/serverless-differences.asciidoc b/serverless/pages/elasticsearch/serverless-differences.asciidoc index 3dcbf9dc71..2625abfb64 100644 --- a/serverless/pages/elasticsearch/serverless-differences.asciidoc +++ b/serverless/pages/elasticsearch/serverless-differences.asciidoc @@ -13,7 +13,7 @@ preview:[] Some features that are available in Elastic Cloud Hosted and self-managed offerings are not available in serverless {es}. These features have either been replaced by a new feature, or are not applicable in the new Serverless architecture: -* **Index lifecycle management ({ilm-init})** is not available, in favor of **https://www.elastic.co/docs/current/serverless/index-management[data stream lifecycle]**. +* **Index lifecycle management ({ilm-init})** is not available, in favor of <>. 
+ In an Elastic Cloud Hosted or self-managed environment, {ilm-init} lets you automatically transition indices through data tiers according to your performance needs and retention requirements. This allows you to balance hardware costs with performance. Serverless Elasticsearch eliminates this From 54e69925f40d7a3cc5533536e29996750012906b Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Thu, 31 Oct 2024 16:17:59 -0500 Subject: [PATCH 13/25] qa devtools --- serverless/images/es-badge.svg | 2 +- serverless/images/obs-badge.svg | 2 +- serverless/images/sec-badge.svg | 6 +++--- .../devtools/debug-grok-expressions.asciidoc | 2 +- .../developer-tools-troubleshooting.asciidoc | 4 ++-- .../devtools/general-developer-tools.asciidoc | 15 +++++++++++---- .../profile-queries-and-aggregations.asciidoc | 4 ++++ 7 files changed, 23 insertions(+), 12 deletions(-) diff --git a/serverless/images/es-badge.svg b/serverless/images/es-badge.svg index 8e4fcd839c..8176b81d2b 100644 --- a/serverless/images/es-badge.svg +++ b/serverless/images/es-badge.svg @@ -4,7 +4,7 @@ - + diff --git a/serverless/images/obs-badge.svg b/serverless/images/obs-badge.svg index 436ea65bcd..0996e309a2 100644 --- a/serverless/images/obs-badge.svg +++ b/serverless/images/obs-badge.svg @@ -4,7 +4,7 @@ - + diff --git a/serverless/images/sec-badge.svg b/serverless/images/sec-badge.svg index 2a2da118f1..1e743a20cf 100644 --- a/serverless/images/sec-badge.svg +++ b/serverless/images/sec-badge.svg @@ -1,7 +1,7 @@ - + - - + + diff --git a/serverless/pages/devtools/debug-grok-expressions.asciidoc b/serverless/pages/devtools/debug-grok-expressions.asciidoc index 675f0342f2..0624bdba93 100644 --- a/serverless/pages/devtools/debug-grok-expressions.asciidoc +++ b/serverless/pages/devtools/debug-grok-expressions.asciidoc @@ -40,7 +40,7 @@ This example walks you through using the **Grok Debugger**. [NOTE] ==== The **Admin** role is required to use the Grok Debugger. 
-For more information, refer to https://www.elastic.co/docs/current/serverless/general/assign-user-roles[] +For more information, refer to <> ==== . From the main menu, click **Developer Tools**, then click **Grok Debugger**. diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc index 4a01078719..bd2a61a21b 100644 --- a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc +++ b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc @@ -163,7 +163,7 @@ GET /my-index-000001/_count } ---- -If the field is aggregatable, you can use https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-aggregations[aggregations] +If the field is aggregatable, you can use <> to check the field's values. For `keyword` fields, you can use a `terms` aggregation to retrieve the field's most common values: @@ -288,7 +288,7 @@ GET /my-index-000001/_settings You can update dynamic index settings with the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-settings[**Update index settings API**]. Changing dynamic index settings for a data stream - requires changing the index template used by the data stream. +requires changing the index template used by the data stream. For static settings, you need to create a new index with the correct settings. Next, you can reindex the data into that index. diff --git a/serverless/pages/devtools/general-developer-tools.asciidoc b/serverless/pages/devtools/general-developer-tools.asciidoc index bcc08df6e0..b13659d57b 100644 --- a/serverless/pages/devtools/general-developer-tools.asciidoc +++ b/serverless/pages/devtools/general-developer-tools.asciidoc @@ -8,17 +8,24 @@ preview:[] | <> | Interact with Elastic REST APIs. -| {es-badge}{obs-badge}{sec-badge} +| {es-badge} + +{obs-badge} + +{sec-badge} | <> | Inspect and analyze your search queries. 
-| {es-badge}{obs-badge}{sec-badge} +| {es-badge} + +{obs-badge} + +{sec-badge} | <> | Build and debug grok patterns before you use them in your data processing pipelines. -| {es-badge}{obs-badge}{sec-badge} +| {es-badge} + +{obs-badge} + +{sec-badge} | <> | Use an interactive code editor to test and debug Painless scripts in real time. -| {obs-badge}{sec-badge} +| {obs-badge} + +{sec-badge} |=== diff --git a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc index 0773a029dd..3fcddfb5cb 100644 --- a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc +++ b/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc @@ -142,17 +142,21 @@ components and includes a simple aggregation: [role="screenshot"] image::images/profiler-gs8.png[Profiling the more complicated query] + +-- ** The top `BooleanQuery` component corresponds to the `bool` in the query. ** The second `BooleanQuery` corresponds to the `terms` query, which is internally converted to a `Boolean` of `should` clauses. It has two child queries that correspond to "sally" and "sue from the `terms` query. ** The `TermQuery` that's labeled with "name:fred" corresponds to `match: fred` in the query. +-- + In the time columns, the **Self time** and **Total time** are no longer identical on all rows: + +-- ** **Self time** represents how long the query component took to execute. ** **Total time** is the time a query component and all its children took to execute. +-- + Therefore, queries like the Boolean queries often have a larger total time than self time. . Select **Aggregation Profile** to view aggregation profiling statistics. 
From 742d2dc97ce8acda512cf41f08cfad0f28073be1 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Thu, 31 Oct 2024 17:10:38 -0500 Subject: [PATCH 14/25] qa project-settings --- .../action-connectors.asciidoc | 54 +++++++++---------- .../project-settings/action-connectors.mdx | 2 +- .../pages/project-settings/spaces.asciidoc | 4 +- .../pages/project-settings/tags.asciidoc | 1 + serverless/pages/project-settings/tags.mdx | 1 + 5 files changed, 33 insertions(+), 29 deletions(-) diff --git a/serverless/pages/project-settings/action-connectors.asciidoc b/serverless/pages/project-settings/action-connectors.asciidoc index b20153bb23..2c18c1955e 100644 --- a/serverless/pages/project-settings/action-connectors.asciidoc +++ b/serverless/pages/project-settings/action-connectors.asciidoc @@ -10,33 +10,33 @@ This content applies to: {es-badge} {obs-badge} {sec-badge} The list of available connectors varies by project type. -* {kibana-ref}/bedrock-action-type.html[Amazon Bedrock]: Send a request to Amazon Bedrock. -* {kibana-ref}/cases-action-type.html[Cases]: Add alerts to cases. -* {kibana-ref}/crowdstrike-action-type.html[CrowdStrike]: Send a request to CrowdStrike. -* {kibana-ref}/d3security-action-type.html[D3 Security]: Create an event or trigger playbook workflow actions in D3 SOAR. -* {kibana-ref}/email-action-type.html[Email]: Send email from your server. -* https://www.elastic.co/guide/en/kibana/master/gemini-action-type.html[Google Gemini]: Send a request to Google Gemini. -* {kibana-ref}/resilient-action-type.html[IBM Resilient]: Create an incident in IBM Resilient. -* {kibana-ref}/index-action-type.html[Index]: Index data into Elasticsearch. -* {kibana-ref}/jira-action-type.html[Jira]: Create an incident in Jira. -* {kibana-ref}/teams-action-type.html[Microsoft Teams]: Send a message to a Microsoft Teams channel. -* {kibana-ref}/obs-ai-assistant-action-type.html[Observability AI Assistant]: Add AI-driven insights and custom actions to your workflow. 
-* {kibana-ref}/openai-action-type.html[OpenAI]: Send a request to OpenAI. -* {kibana-ref}/opsgenie-action-type.html[Opsgenie]: Create or close an alert in Opsgenie. -* {kibana-ref}/pagerduty-action-type.html[PagerDuty]: Send an event in PagerDuty. -* {kibana-ref}/sentinelone-action-type.html[SentinelOne]: Perform response actions on SentinelOne-protected hosts. -* {kibana-ref}/server-log-action-type.html[ServerLog]: Add a message to a Kibana log. -* {kibana-ref}/servicenow-itom-action-type.html[ServiceNow ITOM]: Create an event in ServiceNow ITOM. -* {kibana-ref}/servicenow-action-type.html[ServiceNow ITSM]: Create an incident in ServiceNow ITSM. -* {kibana-ref}/servicenow-sir-action-type.html[ServiceNow SecOps]: Create a security incident in ServiceNow SecOps. -* {kibana-ref}/slack-action-type.html[Slack]: Send messages to Slack channels. -* {kibana-ref}/swimlane-action-type.html[Swimlane]: Create records in Swimlane. -* {kibana-ref}/thehive-action-type.html[TheHive]: Create cases and alerts in TheHive. -* {kibana-ref}/tines-action-type.html[Tines]: Send events to a story. -* {kibana-ref}/torq-action-type.html[Torq]: Trigger a Torq workflow. -* {kibana-ref}/webhook-action-type.html[Webhook]: Send a request to a web service. -* {kibana-ref}/cases-webhook-action-type.html[Webhook - Case Management]: Send a request to a Case Management web service. -* {kibana-ref}/xmatters-action-type.html[xMatters]: Trigger an xMatters workflow. +* {kibana-ref}/bedrock-action-type.html[*Amazon Bedrock*^]: Send a request to Amazon Bedrock. +* {kibana-ref}/cases-action-type.html[*Cases*^]: Add alerts to cases. +* {kibana-ref}/crowdstrike-action-type.html[*CrowdStrike*^]: Send a request to CrowdStrike. +* {kibana-ref}/d3security-action-type.html[*D3 Security*^]: Create an event or trigger playbook workflow actions in D3 SOAR. +* {kibana-ref}/email-action-type.html[*Email*^]: Send email from your server. 
+* {kibana-ref}/gemini-action-type.html[*Google Gemini*^]: Send a request to Google Gemini. +* {kibana-ref}/resilient-action-type.html[*IBM Resilient*^]: Create an incident in IBM Resilient. +* {kibana-ref}/index-action-type.html[*Index*^]: Index data into Elasticsearch. +* {kibana-ref}/jira-action-type.html[*Jira*^]: Create an incident in Jira. +* {kibana-ref}/teams-action-type.html[*Microsoft Teams*^]: Send a message to a Microsoft Teams channel. +* {kibana-ref}/obs-ai-assistant-action-type.html[*Observability AI Assistant*^]: Add AI-driven insights and custom actions to your workflow. +* {kibana-ref}/openai-action-type.html[*OpenAI*^]: Send a request to OpenAI. +* {kibana-ref}/opsgenie-action-type.html[*Opsgenie*^]: Create or close an alert in Opsgenie. +* {kibana-ref}/pagerduty-action-type.html[*PagerDuty*^]: Send an event in PagerDuty. +* {kibana-ref}/sentinelone-action-type.html[*SentinelOne*^]: Perform response actions on SentinelOne-protected hosts. +* {kibana-ref}/server-log-action-type.html[*ServerLog*^]: Add a message to a Kibana log. +* {kibana-ref}/servicenow-itom-action-type.html[*ServiceNow ITOM*^]: Create an event in ServiceNow ITOM. +* {kibana-ref}/servicenow-action-type.html[*ServiceNow ITSM*^]: Create an incident in ServiceNow ITSM. +* {kibana-ref}/servicenow-sir-action-type.html[*ServiceNow SecOps*^]: Create a security incident in ServiceNow SecOps. +* {kibana-ref}/slack-action-type.html[*Slack*^]: Send messages to Slack channels. +* {kibana-ref}/swimlane-action-type.html[*Swimlane*^]: Create records in Swimlane. +* {kibana-ref}/thehive-action-type.html[*TheHive*^]: Create cases and alerts in TheHive. +* {kibana-ref}/tines-action-type.html[*Tines*^]: Send events to a story. +* {kibana-ref}/torq-action-type.html[*Torq*^]: Trigger a Torq workflow. +* {kibana-ref}/webhook-action-type.html[*Webhook*^]: Send a request to a web service. 
+* {kibana-ref}/cases-webhook-action-type.html[*Webhook - Case Management*^]: Send a request to a Case Management web service. +* {kibana-ref}/xmatters-action-type.html[*xMatters*^]: Trigger an xMatters workflow. //// /* Connectors provide a central place to store connection information for services and integrations with third party systems. diff --git a/serverless/pages/project-settings/action-connectors.mdx b/serverless/pages/project-settings/action-connectors.mdx index 856f1a329a..98c109cc88 100644 --- a/serverless/pages/project-settings/action-connectors.mdx +++ b/serverless/pages/project-settings/action-connectors.mdx @@ -46,7 +46,7 @@ The list of available connectors varies by project type. { "title": "Google Gemini", "description": "Send a request to Google Gemini.", - "href": "https://www.elastic.co/guide/en/kibana/master/gemini-action-type.html", + "href": "((kibana-ref))/gemini-action-type.html", "target": "_blank" }, { diff --git a/serverless/pages/project-settings/spaces.asciidoc b/serverless/pages/project-settings/spaces.asciidoc index 22e2fc0888..969356d08d 100644 --- a/serverless/pages/project-settings/spaces.asciidoc +++ b/serverless/pages/project-settings/spaces.asciidoc @@ -41,7 +41,8 @@ You can have up to 100 spaces in a project. {kib} also has an https://www.elastic.co/docs/api/doc/serverless/group/endpoint-spaces[API] if you prefer to create spaces programmatically. -ifeval::["{serverlessCustomRoles}" == "true"] +ifdef::serverlessCustomRoles[] + [discrete] [[spaces-customize-access-to-space]] == Customize access to space @@ -49,6 +50,7 @@ ifeval::["{serverlessCustomRoles}" == "true"] Customizing access to a space is available for the following project types only: {es-badge} {sec-badge} As an administrator, you can define custom roles with specific access to certain spaces and features in a project. Refer to <>. 
+ endif::[] [discrete] diff --git a/serverless/pages/project-settings/tags.asciidoc b/serverless/pages/project-settings/tags.asciidoc index 51abeee359..9652405b1a 100644 --- a/serverless/pages/project-settings/tags.asciidoc +++ b/serverless/pages/project-settings/tags.asciidoc @@ -60,6 +60,7 @@ To assign and remove tags, you must have `write` permission on the objects to wh . Find the tag you want to assign. . Click the actions icon and then select **Manage assignments**. . Select the objects to which you want to assign or remove tags. ++ [role="screenshot"] image::images/tag-assignment.png[Assign tags to saved objects] . Click **Save tag assignments**. diff --git a/serverless/pages/project-settings/tags.mdx b/serverless/pages/project-settings/tags.mdx index 9bb184d181..465c7a02ec 100644 --- a/serverless/pages/project-settings/tags.mdx +++ b/serverless/pages/project-settings/tags.mdx @@ -55,6 +55,7 @@ To assign and remove tags, you must have `write` permission on the objects to wh 1. Click the actions icon and then select **Manage assignments**. 1. Select the objects to which you want to assign or remove tags. + ![Assign tags to saved objects](../images/tag-assignment.png) 1. Click **Save tag assignments**. 
From 246642b202bbf70da64d9c60429ab51faca6e18a Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Thu, 31 Oct 2024 17:10:55 -0500 Subject: [PATCH 15/25] qa hidden pages --- ...your-data-visualize-your-data-create-visualizations.asciidoc | 2 +- ...lore-your-data-visualize-your-data-create-visualizations.mdx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc index d245593406..da47b2c2b4 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc @@ -349,7 +349,7 @@ To personalize your dashboards, add your own logos and graphics with the **Image . Click **Save**. . To save the new image panel to your dashboard click **Save**. -To manage your uploaded image files, open the main menu, then click ** Management → Files**. +To manage your uploaded image files, open the main menu, then click **Management → Files**. [WARNING] ==== diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx index 78e5013d1b..715305a946 100644 --- a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx +++ b/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx @@ -391,7 +391,7 @@ To personalize your dashboards, add your own logos and graphics with the **Image 1. To save the new image panel to your dashboard click **Save**. -To manage your uploaded image files, open the main menu, then click ** Management → Files**. 
+To manage your uploaded image files, open the main menu, then click **Management → Files**. From 0ac67088c4b7afc2ad0710bc9070d81b272680cc Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Thu, 31 Oct 2024 17:12:22 -0500 Subject: [PATCH 16/25] simplify landing page --- .../pages/welcome-to-serverless.asciidoc | 31 ++++++------------- 1 file changed, 9 insertions(+), 22 deletions(-) diff --git a/serverless/pages/welcome-to-serverless.asciidoc b/serverless/pages/welcome-to-serverless.asciidoc index 4c9314753f..b9da6c0887 100644 --- a/serverless/pages/welcome-to-serverless.asciidoc +++ b/serverless/pages/welcome-to-serverless.asciidoc @@ -31,27 +31,25 @@ Elastic serverless products are currently in preview. https://www.elastic.co/blo Choose the type of project that matches your needs and we’ll help you get started with our solution guides. -:hardbreaks-option: - [cols="1,1"] |=== | | | image:https://www.elastic.co/docs/assets/images/elasticsearch.png[width=150] -a| [.card-title]#Elasticsearch# +a| [.card-title]#Elasticsearch# + Build custom search applications with Elasticsearch. <> | image:https://www.elastic.co/docs/assets/images/observability.png[width=150] -a| [.card-title]#Observability# +a| [.card-title]#Observability# + Monitor applications and systems with Elastic Observability. <> | image:https://www.elastic.co/docs/assets/images/security.png[width=150] -a| [.card-title]#Security# +a| [.card-title]#Security# + Detect, investigate, and respond to threats with Elastic Security. <> @@ -63,26 +61,15 @@ Detect, investigate, and respond to threats with Elastic Security. [discrete] == Featured topics -|=== -| - -a| <> +* <>: Invite new members to your organization. - -a| <> +* <>: Assign user roles and privileges to members in your organization. - -a| <> +* <>: Manage your project data, search power, and more. - -a| <> +* <>: View the details about your subscription. - -a| <> +* <>: Check past and current usage for your projects. 
- -a| <> +* <>: Manage your indices, data views, and more. -|=== - -:hardbreaks-option!: From 50d7a5f4955ef5e8628840520db8c8cecd411562 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Fri, 1 Nov 2024 04:14:10 -0500 Subject: [PATCH 17/25] Fix MDX syntax, broken links, and use of updated variables (#159) * fix mdx syntax and links * fix duplicate Elastic --- serverless/pages/welcome-to-serverless.mdx | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/serverless/pages/welcome-to-serverless.mdx b/serverless/pages/welcome-to-serverless.mdx index 96171d8d33..71202d543b 100644 --- a/serverless/pages/welcome-to-serverless.mdx +++ b/serverless/pages/welcome-to-serverless.mdx @@ -7,15 +7,15 @@ layout: landing # Elastic Cloud Serverless -Elastic Cloud Serverless products allow you to deploy and use Elastic for your use cases without managing the underlying Elastic cluster, -such as nodes, data tiers, and scaling. Serverless instances are fully-managed, autoscaled, and automatically upgraded by Elastic so you can -focus more on gaining value and insight from your data. +Elastic Cloud Serverless products allow you to deploy and use Elastic for your use cases without managing the underlying Elastic cluster, +such as nodes, data tiers, and scaling. Serverless instances are fully-managed, autoscaled, and automatically upgraded by Elastic so you can +focus more on gaining value and insight from your data. Elastic provides three serverless solutions available on ((ecloud)): - **((es))** — Build powerful applications and search experiences using a rich ecosystem of vector search capabilities, APIs, and libraries. -- **Elastic ((observability))** — Monitor your own platforms and services using powerful machine learning and analytics tools with your logs, metrics, traces, and APM data. -- **Elastic ((security))** — Detect, investigate, and respond to threats, with SIEM, endpoint protection, and AI-powered analytics capabilities. 
+- **((observability))** — Monitor your own platforms and services using powerful machine learning and analytics tools with your logs, metrics, traces, and APM data. +- **((security))** — Detect, investigate, and respond to threats, with SIEM, endpoint protection, and AI-powered analytics capabilities. Serverless instances of the Elastic Stack that you create in ((ecloud)) are called **serverless projects**. From c17e46f6d7d4597d820245d8b53f69e80490337b Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 10:03:05 -0600 Subject: [PATCH 18/25] clean post rebase and qa --- serverless/index.asciidoc | 1 + .../developer-tools-troubleshooting.asciidoc | 1 + .../developer-tools-troubleshooting.mdx | 25 ++++++++++--------- ...lore-your-data-discover-your-data.asciidoc | 17 ++++++------- .../elasticsearch/search-with-synonyms.mdx | 2 +- 5 files changed, 23 insertions(+), 23 deletions(-) diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index c446dd6687..e31af05b5b 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -300,6 +300,7 @@ include::{security-serverless}/projects-create/create-project.asciidoc[leveloffs include::{security-serverless}/sec-requirements.asciidoc[leveloffset=+2] include::{security-serverless}/security-ui.asciidoc[leveloffset=+2] +include::{security-serverless}/security-spaces.asciidoc[leveloffset=+3] include::{security-serverless}/AI-for-security/ai-for-security-landing-pg.asciidoc[leveloffset=+2] include::{security-serverless}/AI-for-security/ai-assistant.asciidoc[leveloffset=+3] diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc index bd2a61a21b..a7c09a63cd 100644 --- a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc +++ b/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc @@ -288,6 +288,7 @@ GET /my-index-000001/_settings You can update dynamic index settings with 
the https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-settings[**Update index settings API**]. Changing dynamic index settings for a data stream +// Changing dynamic index settings for a data stream requires changing the index template used by the data stream. For static settings, you need to create a new index with the correct settings. diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.mdx b/serverless/pages/devtools/developer-tools-troubleshooting.mdx index 2371d50a54..a1f8273c0d 100644 --- a/serverless/pages/devtools/developer-tools-troubleshooting.mdx +++ b/serverless/pages/devtools/developer-tools-troubleshooting.mdx @@ -16,11 +16,11 @@ Elasticsearch returns an `index_not_found_exception` when the data stream, index or alias you try to query does not exist. This can happen when you misspell the name or when the data has been indexed to a different data stream or index. -Use the [**Exists API**](https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-exists-index-template) to check whether +Use the [**Exists API**](https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-exists-index-template) to check whether a data stream, index, or alias exists: ```js -HEAD my-index +HEAD my-index ``` Use the [**Get index API**](https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-get) @@ -43,7 +43,7 @@ GET /my-alias/_search?ignore_unavailable=true When a search request returns no hits, the data stream or index may contain no data. This can happen when there is a data ingestion issue. -For example, the data may have been indexed to a data stream or index with +For example, the data may have been indexed to a data stream or index with another name. 
Use the [**Count API**](https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-count-1) @@ -142,8 +142,8 @@ GET /my-index-000001/_count } ``` -If the field is aggregatable, you can use -to check the field's values. For `keyword` fields, you can use a `terms` +If the field is aggregatable, you can use +to check the field's values. For `keyword` fields, you can use a `terms` aggregation to retrieve the field's most common values: ```js @@ -215,7 +215,7 @@ GET /my-index-000001/_validate/query?rewrite=true } ``` -Use the [**Explain API**](((ref))/search-explain.html) to find out why a +Use the [**Explain API**](((ref))/search-explain.html) to find out why a specific document matches or doesn’t match a query: ```js @@ -229,7 +229,7 @@ GET /my-index-000001/_explain/0 The [**Profile API**](((ref))/search-profile.html) provides detailed timing information about a search request. -For a visual representation of the results, use the +For a visual representation of the results, use the . @@ -240,7 +240,7 @@ You can now copy the query sent to ((es)) for further analysis in Console. ## Check index settings -Index settings {/* Index settings */} +Index settings {/* Index settings */} can influence search results. For example, the `index.query.default_field` setting, which determines the field that is queried when a query specifies no explicit field. @@ -251,10 +251,11 @@ to retrieve the settings for an index: GET /my-index-000001/_settings ``` -You can update dynamic index settings with the -[**Update index settings API**](https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-settings). -Changing dynamic index settings for a data stream -{/* Changing dynamic index settings for a data stream */} requires changing the index template used by the data stream. 
+You can update dynamic index settings with the +[**Update index settings API**](https://www.elastic.co/docs/api/doc/elasticsearch-serverless/operation/operation-indices-put-settings). +Changing dynamic index settings for a data stream +{/* Changing dynamic index settings for a data stream */} +requires changing the index template used by the data stream. For static settings, you need to create a new index with the correct settings. Next, you can reindex the data into that index. diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc index 6422146fe1..ba24c07299 100644 --- a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc +++ b/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc @@ -42,20 +42,17 @@ image::images/create-data-view.png[Create a data view] + . Start typing in the **Index pattern** field, and the names of indices, data streams, and aliases that match your input will be displayed. + -* To match multiple sources, use a wildcard (*), for example, `b*` and any indices starting with the letter `b` display. -* To match multiple sources, enter their names separated by a comma. Do not include a space after the comma. For example `books,magazines` would match two indices: `books` and `magazines`. -* To exclude a source, use a minus sign (-), for example `-books`. -+ +** To match multiple sources, use a wildcard (*), for example, `b*` and any indices starting with the letter `b` display. +** To match multiple sources, enter their names separated by a comma. Do not include a space after the comma. For example `books,magazines` would match two indices: `books` and `magazines`. +** To exclude a source, use a minus sign (-), for example `-books`. . In the **Timestamp** field dropdown, and then select `release_date`. 
+ -* If you don't set a time field, you can't use global time filters on your dashboards. Leaving the time field unset might be useful if you have multiple time fields and want to create dashboards that combine visualizations based on different timestamps. -* If your index doesn't have time-based data, choose **I don't want to use the time filter**. -+ +** If you don't set a time field, you can't use global time filters on your dashboards. Leaving the time field unset might be useful if you have multiple time fields and want to create dashboards that combine visualizations based on different timestamps. +** If your index doesn't have time-based data, choose **I don't want to use the time filter**. . Click **Show advanced settings** to: + -* Display hidden and system indices. -* Specify your own data view name. For example, enter your Elasticsearch index alias name. -+ +** Display hidden and system indices. +** Specify your own data view name. For example, enter your Elasticsearch index alias name. . Click **Save data view to {kib}**. . Adjust the time range to view data for the **Last 40 years** to view all your book data. + diff --git a/serverless/pages/elasticsearch/search-with-synonyms.mdx b/serverless/pages/elasticsearch/search-with-synonyms.mdx index 810402dfeb..9f3c1a8787 100644 --- a/serverless/pages/elasticsearch/search-with-synonyms.mdx +++ b/serverless/pages/elasticsearch/search-with-synonyms.mdx @@ -64,7 +64,7 @@ Check each synonym token filter documentation for configuration details and inst ### Test your analyzer You can test an analyzer configuration without modifying your index settings. 
-Use the [analyze API](((ref)/indices-analyze.html)) to test your analyzer chain: +Use the [analyze API](((ref))/indices-analyze.html) to test your analyzer chain: ```bash curl "${ES_URL}/my-index/_analyze?pretty" \ From 0ead9070cd75e464daaf95b9fea409577a119759 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 10:40:12 -0600 Subject: [PATCH 19/25] restructure index --- serverless/index.asciidoc | 485 +----------------- serverless/pages/devtools/index.asciidoc | 15 +- serverless/pages/elasticsearch/index.asciidoc | 58 +++ serverless/pages/general/index.asciidoc | 34 ++ .../pages/project-settings/index.asciidoc | 45 +- 5 files changed, 133 insertions(+), 504 deletions(-) create mode 100644 serverless/pages/elasticsearch/index.asciidoc create mode 100644 serverless/pages/general/index.asciidoc diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index e31af05b5b..e4f40b14a9 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -3,10 +3,10 @@ include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] include::{asciidoc-dir}/../../shared/attributes.asciidoc[] +:general-serverless: {docs-content-root}/serverless/pages/general +:elasticsearch-serverless: {docs-content-root}/serverless/pages/elasticsearch :security-serverless: {security-docs-root}/docs/serverless :observability-serverless: {observability-docs-root}/docs/en/serverless -:elasticsearch-serverless: {docs-content-root}/serverless/pages/elasticsearch -:general-serverless: {docs-content-root}/serverless/pages/general :devtools-serverless: {docs-content-root}/serverless/pages/devtools :project-settings-serverless: {docs-content-root}/serverless/pages/project-settings @@ -42,481 +42,12 @@ include::{asciidoc-dir}/../../shared/attributes.asciidoc[] = Serverless -[[intro]] -== Welcome to Elastic serverless - -include::{docs-content-root}/serverless/pages/welcome-to-serverless.asciidoc[leveloffset=+2] - 
-include::{general-serverless}/what-is-serverless.asciidoc[leveloffset=+2] - -include::{general-serverless}/sign-up.asciidoc[leveloffset=+2] - -include::{general-serverless}/manage-org.asciidoc[leveloffset=+2] -include::{general-serverless}/manage-access-to-org.asciidoc[leveloffset=+3] -include::{general-serverless}/manage-access-to-org-user-roles.asciidoc[leveloffset=+3] -include::{general-serverless}/manage-access-to-org-from-existing-account.asciidoc[leveloffset=+3] - -include::{general-serverless}/manage-your-project.asciidoc[leveloffset=+2] -include::{general-serverless}/manage-your-project-rest-api.asciidoc[leveloffset=+3] - -include::{general-serverless}/manage-billing.asciidoc[leveloffset=+2] -include::{general-serverless}/manage-billing-check-subscription.asciidoc[leveloffset=+3] -include::{general-serverless}/manage-billing-monitor-usage.asciidoc[leveloffset=+3] -include::{general-serverless}/manage-billing-history.asciidoc[leveloffset=+3] -include::{general-serverless}/manage-billing-pricing-model.asciidoc[leveloffset=+3] -include::{general-serverless}/manage-billing-stop-project.asciidoc[leveloffset=+3] - -include::{general-serverless}/service-status.asciidoc[leveloffset=+2] - -include::{general-serverless}/user-profile.asciidoc[leveloffset=+2] - -include::{general-serverless}/cloud-regions.asciidoc[leveloffset=+2] - -[[what-is-elasticsearch-serverless]] -== Elasticsearch serverless - -++++ -Elasticsearch -++++ - -include::{elasticsearch-serverless}/what-is-elasticsearch-serverless.asciidoc[leveloffset=+2] - -include::{elasticsearch-serverless}/pricing.asciidoc[leveloffset=+2] - -include::{elasticsearch-serverless}/get-started.asciidoc[leveloffset=+2] - -include::{elasticsearch-serverless}/clients.asciidoc[leveloffset=+2] -include::{elasticsearch-serverless}/clients-go-getting-started.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/clients-java-getting-started.asciidoc[leveloffset=+3] 
-include::{elasticsearch-serverless}/clients-dot-net-getting-started.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/clients-nodejs-getting-started.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/clients-php-getting-started.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/clients-python-getting-started.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/clients-ruby-getting-started.asciidoc[leveloffset=+3] - -include::{elasticsearch-serverless}/apis-http-apis.asciidoc[leveloffset=+2] -include::{elasticsearch-serverless}/apis-elasticsearch-conventions.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/apis-kibana-conventions.asciidoc[leveloffset=+3] - -include::{elasticsearch-serverless}/elasticsearch-developer-tools.asciidoc[leveloffset=+2] - -include::{elasticsearch-serverless}/ingest-your-data.asciidoc[leveloffset=+2] -include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/ingest-your-data-upload-file.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+3] - -include::{elasticsearch-serverless}/search-your-data.asciidoc[leveloffset=+2] -include::{elasticsearch-serverless}/search-your-data-the-search-api.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/search-with-synonyms.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/knn-search.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/search-your-data-semantic-search.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/search-your-data-semantic-search-elser.asciidoc[leveloffset=+4] - 
-include::{elasticsearch-serverless}/explore-your-data.asciidoc[leveloffset=+2] -include::{elasticsearch-serverless}/explore-your-data-the-aggregations-api.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/explore-your-data-discover-your-data.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/explore-your-data-visualize-your-data.asciidoc[leveloffset=+3] -include::{elasticsearch-serverless}/explore-your-data-alerting.asciidoc[leveloffset=+3] - -include::{elasticsearch-serverless}/search-playground.asciidoc[leveloffset=+2] - -include::{elasticsearch-serverless}/serverless-differences.asciidoc[leveloffset=+2] - -include::{elasticsearch-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] - -[[what-is-observability-serverless]] -== Elastic Observability serverless - -++++ -Elastic Observability -++++ - -include::{observability-serverless}/what-is-observability-serverless.asciidoc[leveloffset=+2] - -include::{observability-serverless}/observability-overview.asciidoc[leveloffset=+2] - -include::{observability-serverless}/quickstarts/overview.asciidoc[leveloffset=+2] -include::{observability-serverless}/quickstarts/monitor-hosts-with-elastic-agent.asciidoc[leveloffset=+3] -include::{observability-serverless}/quickstarts/k8s-logs-metrics.asciidoc[leveloffset=+3] - -include::{observability-serverless}/projects/billing.asciidoc[leveloffset=+2] - -include::{observability-serverless}/projects/create-an-observability-project.asciidoc[leveloffset=+2] - -include::{observability-serverless}/logging/log-monitoring.asciidoc[leveloffset=+2] -include::{observability-serverless}/logging/get-started-with-logs.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/stream-log-files.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/correlate-application-logs.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/plaintext-application-logs.asciidoc[leveloffset=+4] 
-include::{observability-serverless}/logging/ecs-application-logs.asciidoc[leveloffset=+4] -include::{observability-serverless}/logging/send-application-logs.asciidoc[leveloffset=+4] -include::{observability-serverless}/logging/parse-log-data.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/filter-and-aggregate-logs.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/view-and-monitor-logs.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/add-logs-service-name.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/run-log-pattern-analysis.asciidoc[leveloffset=+3] -include::{observability-serverless}/logging/troubleshoot-logs.asciidoc[leveloffset=+3] - -include::{observability-serverless}/inventory.asciidoc[leveloffset=+2] - -include::{observability-serverless}/apm/apm.asciidoc[leveloffset=+2] -include::{observability-serverless}/apm/apm-get-started.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm/apm-send-traces-to-elastic.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm-agents/apm-agents-elastic-apm-agents.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm-agents/apm-agents-opentelemetry.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-opentelemetry-native-support.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-collect-metrics.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-limitations.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm-agents/apm-agents-opentelemetry-resource-attributes.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm-agents/apm-agents-aws-lambda-functions.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-view-and-analyze-traces.asciidoc[leveloffset=+3] 
-include::{observability-serverless}/apm/apm-find-transaction-latency-and-failure-correlations.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-integrate-with-machine-learning.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-create-custom-links.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-track-deployments-with-annotations.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-query-your-data.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-filter-your-data.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-observe-lambda-functions.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-ui-overview.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-ui-services.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-traces.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-dependencies.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-service-map.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-service-overview.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-transactions.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-trace-sample-timeline.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-errors.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-metrics.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-infrastructure.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-ui-logs.asciidoc[leveloffset=+5] -include::{observability-serverless}/apm/apm-data-types.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm/apm-distributed-tracing.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm/apm-reduce-your-data-usage.asciidoc[leveloffset=+3] 
-include::{observability-serverless}/apm/apm-transaction-sampling.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-compress-spans.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-stacktrace-collection.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-keep-data-secure.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm/apm-troubleshooting.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm/apm-reference.asciidoc[leveloffset=+3] -include::{observability-serverless}/apm/apm-kibana-settings.asciidoc[leveloffset=+4] -include::{observability-serverless}/apm/apm-server-api.asciidoc[leveloffset=+4] - -include::{observability-serverless}/infra-monitoring/infra-monitoring.asciidoc[leveloffset=+2] -include::{observability-serverless}/infra-monitoring/get-started-with-metrics.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/view-infrastructure-metrics.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/analyze-hosts.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/detect-metric-anomalies.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/configure-infra-settings.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/troubleshooting-infra.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/handle-no-results-found-message.asciidoc[leveloffset=+4] -include::{observability-serverless}/infra-monitoring/metrics-reference.asciidoc[leveloffset=+3] -include::{observability-serverless}/infra-monitoring/host-metrics.asciidoc[leveloffset=+4] -include::{observability-serverless}/infra-monitoring/container-metrics.asciidoc[leveloffset=+4] -include::{observability-serverless}/infra-monitoring/kubernetes-pod-metrics.asciidoc[leveloffset=+4] -include::{observability-serverless}/infra-monitoring/aws-metrics.asciidoc[leveloffset=+4] 
-include::{observability-serverless}/infra-monitoring/metrics-app-fields.asciidoc[leveloffset=+3] - -include::{observability-serverless}/synthetics/synthetics-intro.asciidoc[leveloffset=+2] -include::{observability-serverless}/synthetics/synthetics-get-started.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-get-started-project.asciidoc[leveloffset=+4] -include::{observability-serverless}/synthetics/synthetics-get-started-ui.asciidoc[leveloffset=+4] -include::{observability-serverless}/synthetics/synthetics-journeys.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-create-test.asciidoc[leveloffset=+4] -include::{observability-serverless}/synthetics/synthetics-monitor-use.asciidoc[leveloffset=+4] -include::{observability-serverless}/synthetics/synthetics-recorder.asciidoc[leveloffset=+4] -include::{observability-serverless}/synthetics/synthetics-lightweight.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-manage-monitors.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-params-secrets.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-analyze.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-private-location.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-command-reference.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-configuration.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-settings.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-feature-roles.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-manage-retention.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-scale-and-architect.asciidoc[leveloffset=+3] 
-include::{observability-serverless}/synthetics/synthetics-security-encryption.asciidoc[leveloffset=+3] -include::{observability-serverless}/synthetics/synthetics-troubleshooting.asciidoc[leveloffset=+3] - -include::{observability-serverless}/dashboards/dashboards-and-visualizations.asciidoc[leveloffset=+2] - -include::{observability-serverless}/alerting/alerting.asciidoc[leveloffset=+2] -include::{observability-serverless}/alerting/create-manage-rules.asciidoc[leveloffset=+3] -include::{observability-serverless}/alerting/aiops-generate-anomaly-alerts.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-anomaly-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-custom-threshold-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-elasticsearch-query-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-error-count-threshold-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-failed-transaction-rate-threshold-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-inventory-threshold-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-latency-threshold-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/create-slo-burn-rate-alert-rule.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/synthetic-monitor-status-alert.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/aggregation-options.asciidoc[leveloffset=+3] -include::{observability-serverless}/alerting/rate-aggregation.asciidoc[leveloffset=+4] -include::{observability-serverless}/alerting/view-alerts.asciidoc[leveloffset=+3] -include::{observability-serverless}/alerting/triage-slo-burn-rate-breaches.asciidoc[leveloffset=+4] 
-include::{observability-serverless}/alerting/triage-threshold-breaches.asciidoc[leveloffset=+4] - -include::{observability-serverless}/slos/slos.asciidoc[leveloffset=+2] -include::{observability-serverless}/slos/create-an-slo.asciidoc[leveloffset=+3] - -include::{observability-serverless}/cases/cases.asciidoc[leveloffset=+2] -include::{observability-serverless}/cases/create-manage-cases.asciidoc[leveloffset=+3] -include::{observability-serverless}/cases/manage-cases-settings.asciidoc[leveloffset=+3] - -include::{observability-serverless}/aiops/aiops.asciidoc[leveloffset=+2] -include::{observability-serverless}/aiops/aiops-detect-anomalies.asciidoc[leveloffset=+3] -include::{observability-serverless}/aiops/aiops-tune-anomaly-detection-job.asciidoc[leveloffset=+4] -include::{observability-serverless}/aiops/aiops-forecast-anomaly.asciidoc[leveloffset=+4] -include::{observability-serverless}/aiops/aiops-analyze-spikes.asciidoc[leveloffset=+3] -include::{observability-serverless}/aiops/aiops-detect-change-points.asciidoc[leveloffset=+3] - -include::{observability-serverless}/monitor-datasets.asciidoc[leveloffset=+2] - -include::{observability-serverless}/ai-assistant/ai-assistant.asciidoc[leveloffset=+2] - -include::{observability-serverless}/elastic-entity-model.asciidoc[leveloffset=+2] - -include::{observability-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] - -[[what-is-security-serverless]] -== Elastic Security serverless - -++++ -Elastic Security -++++ - -include::{security-serverless}/what-is-security-serverless.asciidoc[leveloffset=+2] - -include::{security-serverless}/security-overview.asciidoc[leveloffset=+2] - -include::{security-serverless}/billing.asciidoc[leveloffset=+2] - -include::{security-serverless}/projects-create/create-project.asciidoc[leveloffset=+2] - -include::{security-serverless}/sec-requirements.asciidoc[leveloffset=+2] - -include::{security-serverless}/security-ui.asciidoc[leveloffset=+2] 
-include::{security-serverless}/security-spaces.asciidoc[leveloffset=+3] - -include::{security-serverless}/AI-for-security/ai-for-security-landing-pg.asciidoc[leveloffset=+2] -include::{security-serverless}/AI-for-security/ai-assistant.asciidoc[leveloffset=+3] -include::{security-serverless}/AI-for-security/attack-discovery.asciidoc[leveloffset=+3] -include::{security-serverless}/AI-for-security/llm-connector-guides.asciidoc[leveloffset=+3] -include::{security-serverless}/AI-for-security/llm-performance-matrix.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/connect-to-azure-openai.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/connect-to-bedrock.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/connect-to-openai.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/connect-to-vertex.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/connect-to-byo-llm.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/ai-use-cases.asciidoc[leveloffset=+3] -include::{security-serverless}/AI-for-security/usecase-attack-disc-ai-assistant-incident-reporting.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/ai-assistant-alert-triage.asciidoc[leveloffset=+4] -include::{security-serverless}/AI-for-security/ai-assistant-esql-queries.asciidoc[leveloffset=+4] - -include::{security-serverless}/ingest/ingest-data.asciidoc[leveloffset=+2] -include::{security-serverless}/ingest/threat-intelligence.asciidoc[leveloffset=+3] -include::{security-serverless}/ingest/auto-import.asciidoc[leveloffset=+3] - -include::{security-serverless}/edr-install-config/endpoint-protection-intro.asciidoc[leveloffset=+2] -include::{security-serverless}/edr-install-config/deploy-endpoint-reqs.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-install-config/install-elastic-defend.asciidoc[leveloffset=+3] 
-include::{security-serverless}/edr-install-config/deploy-endpoint-macos-cat-mont.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/deploy-endpoint-macos-ven.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/deploy-with-mdm.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/agent-tamper-protection.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/defend-feature-privs.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-install-config/configure-endpoint-integration-policy.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-install-config/artifact-control.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/endpoint-diagnostic-data.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/self-healing-rollback.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/linux-file-monitoring.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/endpoint-data-volume.asciidoc[leveloffset=+4] -include::{security-serverless}/edr-install-config/uninstall-agent.asciidoc[leveloffset=+3] - -include::{security-serverless}/edr-manage/manage-endpoint-protection.asciidoc[leveloffset=+2] -include::{security-serverless}/edr-manage/endpoints-page.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/policies-page-ov.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/trusted-apps-ov.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/event-filters.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/host-isolation-exceptions.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/blocklist.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/optimize-edr.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/endpoint-event-capture.asciidoc[leveloffset=+3] 
-include::{security-serverless}/edr-manage/allowlist-endpoint-3rd-party-av.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/endpoint-self-protection.asciidoc[leveloffset=+3] -include::{security-serverless}/edr-manage/endpoint-command-ref.asciidoc[leveloffset=+3] - -include::{security-serverless}/endpoint-response-actions/response-actions.asciidoc[leveloffset=+2] -include::{security-serverless}/endpoint-response-actions/automated-response-actions.asciidoc[leveloffset=+3] -include::{security-serverless}/endpoint-response-actions/host-isolation-ov.asciidoc[leveloffset=+3] -include::{security-serverless}/endpoint-response-actions/response-actions-history.asciidoc[leveloffset=+3] -include::{security-serverless}/endpoint-response-actions/third-party-actions.asciidoc[leveloffset=+3] -include::{security-serverless}/endpoint-response-actions/response-actions-config.asciidoc[leveloffset=+3] - -include::{security-serverless}/cloud-native-security/cloud-native-security-overview.asciidoc[leveloffset=+2] -include::{security-serverless}/cloud-native-security/security-posture-management.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/enable-cloudsec.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/cspm.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/cspm-get-started.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/cspm-get-started-gcp.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/cspm-get-started-azure.asciidoc[leveloffset=+4] -// include::{security-serverless}/cloud-native-security/cspm-findings-page.asciidoc[leveloffset=+4] -// include::{security-serverless}/cloud-native-security/benchmark-rules.asciidoc[leveloffset=+4] -// include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+4] 
-include::{security-serverless}/cloud-native-security/cspm-security-posture-faq.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/kspm.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/get-started-with-kspm.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/cspm-findings-page.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/benchmark-rules.asciidoc[leveloffset=+4] -// include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/security-posture-faq.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/vuln-management-overview.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/vuln-management-get-started.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/vuln-management-findings.asciidoc[leveloffset=+4] -// include::{security-serverless}/dashboards/vuln-management-dashboard-dash.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/vuln-management-faq.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/d4c-overview.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/d4c-get-started.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/d4c-policy-guide.asciidoc[leveloffset=+4] -// include::{security-serverless}/dashboards/kubernetes-dashboard-dash.asciidoc[leveloffset=+4] -include::{security-serverless}/cloud-native-security/cloud-workload-protection.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/environment-variable-capture.asciidoc[leveloffset=+4] - -include::{security-serverless}/explore/explore-your-data.asciidoc[leveloffset=+2] -include::{security-serverless}/explore/hosts-overview.asciidoc[leveloffset=+3] 
-include::{security-serverless}/explore/network-page-overview.asciidoc[leveloffset=+3] -include::{security-serverless}/explore/conf-map-ui.asciidoc[leveloffset=+4] -include::{security-serverless}/explore/users-page.asciidoc[leveloffset=+3] -include::{security-serverless}/explore/data-views-in-sec.asciidoc[leveloffset=+3] -include::{security-serverless}/explore/runtime-fields.asciidoc[leveloffset=+3] -include::{security-serverless}/explore/siem-field-reference.asciidoc[leveloffset=+3] - -include::{security-serverless}/dashboards/dashboards-overview.asciidoc[leveloffset=+2] -include::{security-serverless}/dashboards/overview-dashboard.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/detection-response-dashboard.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/kubernetes-dashboard-dash.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/cloud-posture-dashboard-dash.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/detection-entity-dashboard.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/data-quality-dash.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/vuln-management-dashboard-dash.asciidoc[leveloffset=+3] -include::{security-serverless}/dashboards/rule-monitoring-dashboard.asciidoc[leveloffset=+3] - -include::{security-serverless}/rules/detection-engine-overview.asciidoc[leveloffset=+2] -include::{security-serverless}/rules/detections-permissions-section.asciidoc[leveloffset=+3] - -include::{security-serverless}/rules/about-rules.asciidoc[leveloffset=+2] -include::{security-serverless}/rules/rules-ui-create.asciidoc[leveloffset=+3] -include::{security-serverless}/rules/interactive-investigation-guides.asciidoc[leveloffset=+4] -include::{security-serverless}/rules/building-block-rule.asciidoc[leveloffset=+4] -include::{security-serverless}/rules/prebuilt-rules/prebuilt-rules-management.asciidoc[leveloffset=+3] 
-include::{security-serverless}/rules/rules-ui-management.asciidoc[leveloffset=+3] -include::{security-serverless}/rules/alerts-ui-monitor.asciidoc[leveloffset=+3] -include::{security-serverless}/rules/detections-ui-exceptions.asciidoc[leveloffset=+3] -include::{security-serverless}/rules/value-lists-exceptions.asciidoc[leveloffset=+4] -include::{security-serverless}/rules/add-exceptions.asciidoc[leveloffset=+4] -include::{security-serverless}/rules/shared-exception-lists.asciidoc[leveloffset=+4] -include::{security-serverless}/rules/rules-coverage.asciidoc[leveloffset=+3] -include::{security-serverless}/rules/tuning-detection-signals.asciidoc[leveloffset=+3] -include::{security-serverless}/rules/prebuilt-rules/prebuilt-rules.asciidoc[leveloffset=+3] - -include::{security-serverless}/alerts/alerts-ui-manage.asciidoc[leveloffset=+2] -include::{security-serverless}/alerts/visualize-alerts.asciidoc[leveloffset=+3] -include::{security-serverless}/alerts/view-alert-details.asciidoc[leveloffset=+3] -include::{security-serverless}/alerts/signals-to-cases.asciidoc[leveloffset=+3] -include::{security-serverless}/alerts/alert-suppression.asciidoc[leveloffset=+3] -include::{security-serverless}/alerts/reduce-notifications-alerts.asciidoc[leveloffset=+3] -include::{security-serverless}/alerts/query-alert-indices.asciidoc[leveloffset=+3] -include::{security-serverless}/alerts/alert-schema.asciidoc[leveloffset=+3] - -include::{security-serverless}/advanced-entity-analytics/advanced-entity-analytics-overview.asciidoc[leveloffset=+2] -include::{security-serverless}/advanced-entity-analytics/entity-risk-scoring.asciidoc[leveloffset=+3] -include::{security-serverless}/advanced-entity-analytics/ers-req.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/asset-criticality.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/turn-on-risk-engine.asciidoc[leveloffset=+4] 
-include::{security-serverless}/advanced-entity-analytics/analyze-risk-score-data.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/advanced-behavioral-detections.asciidoc[leveloffset=+3] -include::{security-serverless}/advanced-entity-analytics/ml-requirements.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/machine-learning.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/tuning-anomaly-results.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/behavioral-detection-use-cases.asciidoc[leveloffset=+4] -include::{security-serverless}/advanced-entity-analytics/prebuilt-ml-jobs.asciidoc[leveloffset=+4] - -include::{security-serverless}/investigate/investigate-events.asciidoc[leveloffset=+2] -include::{security-serverless}/investigate/timelines-ui.asciidoc[leveloffset=+3] -include::{security-serverless}/investigate/timeline-templates-ui.asciidoc[leveloffset=+4] -include::{security-serverless}/investigate/timeline-object-schema.asciidoc[leveloffset=+4] -include::{security-serverless}/alerts/visual-event-analyzer.asciidoc[leveloffset=+3] -include::{security-serverless}/cloud-native-security/session-view.asciidoc[leveloffset=+3] -include::{security-serverless}/osquery/use-osquery.asciidoc[leveloffset=+3] -include::{security-serverless}/osquery/osquery-response-action.asciidoc[leveloffset=+4] -include::{security-serverless}/osquery/invest-guide-run-osquery.asciidoc[leveloffset=+4] -include::{security-serverless}/osquery/alerts-run-osquery.asciidoc[leveloffset=+4] -include::{security-serverless}/osquery/view-osquery-results.asciidoc[leveloffset=+4] -include::{security-serverless}/osquery/osquery-placeholder-fields.asciidoc[leveloffset=+4] -include::{security-serverless}/investigate/indicators-of-compromise.asciidoc[leveloffset=+3] -include::{security-serverless}/investigate/cases-overview.asciidoc[leveloffset=+3] 
-include::{security-serverless}/investigate/case-permissions.asciidoc[leveloffset=+4] -include::{security-serverless}/investigate/cases-open-manage.asciidoc[leveloffset=+4] -include::{security-serverless}/investigate/cases-settings.asciidoc[leveloffset=+4] - -include::{security-serverless}/assets/asset-management.asciidoc[leveloffset=+2] - -include::{security-serverless}/settings/manage-settings.asciidoc[leveloffset=+2] -include::{security-serverless}/settings/project-settings.asciidoc[leveloffset=+3] -include::{security-serverless}/settings/advanced-settings.asciidoc[leveloffset=+3] - -include::{security-serverless}/troubleshooting/troubleshooting-intro.asciidoc[leveloffset=+2] -include::{security-serverless}/troubleshooting/ts-detection-rules.asciidoc[leveloffset=+3] -include::{security-serverless}/troubleshooting/troubleshoot-endpoints.asciidoc[leveloffset=+3] - -include::{security-serverless}/technical-preview-limitations.asciidoc[leveloffset=+2] - -[[developer-tools]] -== Dev tools - -include::{devtools-serverless}/general-developer-tools.asciidoc[leveloffset=+2] - -include::{devtools-serverless}/run-api-requests-in-the-console.asciidoc[leveloffset=+2] - -include::{devtools-serverless}/profile-queries-and-aggregations.asciidoc[leveloffset=+2] - -include::{devtools-serverless}/debug-grok-expressions.asciidoc[leveloffset=+2] - -include::{devtools-serverless}/debug-painless-scripts.asciidoc[leveloffset=+2] - -include::{devtools-serverless}/developer-tools-troubleshooting.asciidoc[leveloffset=+2] - -[[project-and-management-settings]] -== Project and management settings - -include::{project-settings-serverless}/project-and-management-settings.asciidoc[leveloffset=+2] - -include::{project-settings-serverless}/project-settings.asciidoc[leveloffset=+2] -include::{project-settings-serverless}/api-keys.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/action-connectors.asciidoc[leveloffset=+3] 
-include::{project-settings-serverless}/custom-roles.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/data-views.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/files.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/index-management.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/ingest-pipelines.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/logstash-pipelines.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/machine-learning.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/maintenance-windows.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/maps.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/reports.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/rules.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/saved-objects.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/spaces.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/tags.asciidoc[leveloffset=+3] -include::{project-settings-serverless}/transforms.asciidoc[leveloffset=+3] - -include::{project-settings-serverless}/integrations.asciidoc[leveloffset=+2] - -include::{project-settings-serverless}/fleet-and-elastic-agent.asciidoc[leveloffset=+2] +include::{general-serverless}/index.asciidoc[] +include::{elasticsearch-serverless}/index.asciidoc[] +include::{observability-serverless}/index.asciidoc[] +include::{security-serverless}/index.asciidoc[] +include::{devtools-serverless}/index.asciidoc[] +include::{project-settings-serverless}/index.asciidoc[] // Hidden pages include::{elasticsearch-serverless}/explore-your-data-visualize-your-data-create-dashboards.asciidoc[leveloffset=+1] diff --git a/serverless/pages/devtools/index.asciidoc b/serverless/pages/devtools/index.asciidoc index a40fcfc485..af192f9c03 100644 --- a/serverless/pages/devtools/index.asciidoc +++ b/serverless/pages/devtools/index.asciidoc @@ 
-3,14 +3,17 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] include::{docs-root}/shared/attributes.asciidoc[] -= Dev tools +[[developer-tools]] +== Dev tools -include::./run-api-requests-in-the-console.asciidoc[leveloffset=+1] +include::./general-developer-tools.asciidoc[leveloffset=+2] -include::./profile-queries-and-aggregations.asciidoc[leveloffset=+1] +include::./run-api-requests-in-the-console.asciidoc[leveloffset=+2] -include::./debug-grok-expressions.asciidoc[leveloffset=+1] +include::./profile-queries-and-aggregations.asciidoc[leveloffset=+2] -include::./debug-painless-scripts.asciidoc[leveloffset=+1] +include::./debug-grok-expressions.asciidoc[leveloffset=+2] -include::./developer-tools-troubleshooting.asciidoc[leveloffset=+1] +include::./debug-painless-scripts.asciidoc[leveloffset=+2] + +include::./developer-tools-troubleshooting.asciidoc[leveloffset=+2] diff --git a/serverless/pages/elasticsearch/index.asciidoc b/serverless/pages/elasticsearch/index.asciidoc new file mode 100644 index 0000000000..478fff6208 --- /dev/null +++ b/serverless/pages/elasticsearch/index.asciidoc @@ -0,0 +1,58 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +[[what-is-elasticsearch-serverless]] +== Elasticsearch + +++++ +Elasticsearch +++++ + +include::./what-is-elasticsearch-serverless.asciidoc[leveloffset=+2] + +include::./pricing.asciidoc[leveloffset=+2] + +include::./get-started.asciidoc[leveloffset=+2] + +include::./clients.asciidoc[leveloffset=+2] +include::./clients-go-getting-started.asciidoc[leveloffset=+3] +include::./clients-java-getting-started.asciidoc[leveloffset=+3] +include::./clients-dot-net-getting-started.asciidoc[leveloffset=+3] +include::./clients-nodejs-getting-started.asciidoc[leveloffset=+3] +include::./clients-php-getting-started.asciidoc[leveloffset=+3] +include::./clients-python-getting-started.asciidoc[leveloffset=+3] 
+include::./clients-ruby-getting-started.asciidoc[leveloffset=+3] + +include::./apis-http-apis.asciidoc[leveloffset=+2] +include::./apis-elasticsearch-conventions.asciidoc[leveloffset=+3] +include::./apis-kibana-conventions.asciidoc[leveloffset=+3] + +include::./elasticsearch-developer-tools.asciidoc[leveloffset=+2] + +include::./ingest-your-data.asciidoc[leveloffset=+2] +include::./ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+3] +include::./ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+3] +include::./ingest-your-data-upload-file.asciidoc[leveloffset=+3] +include::./ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+3] +include::./ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+3] + +include::./search-your-data.asciidoc[leveloffset=+2] +include::./search-your-data-the-search-api.asciidoc[leveloffset=+3] +include::./search-with-synonyms.asciidoc[leveloffset=+3] +include::./knn-search.asciidoc[leveloffset=+3] +include::./search-your-data-semantic-search.asciidoc[leveloffset=+3] +include::./search-your-data-semantic-search-elser.asciidoc[leveloffset=+4] + +include::./explore-your-data.asciidoc[leveloffset=+2] +include::./explore-your-data-the-aggregations-api.asciidoc[leveloffset=+3] +include::./explore-your-data-discover-your-data.asciidoc[leveloffset=+3] +include::./explore-your-data-visualize-your-data.asciidoc[leveloffset=+3] +include::./explore-your-data-alerting.asciidoc[leveloffset=+3] + +include::./search-playground.asciidoc[leveloffset=+2] + +include::./serverless-differences.asciidoc[leveloffset=+2] + +include::./technical-preview-limitations.asciidoc[leveloffset=+2] diff --git a/serverless/pages/general/index.asciidoc b/serverless/pages/general/index.asciidoc new file mode 100644 index 0000000000..7814def389 --- /dev/null +++ b/serverless/pages/general/index.asciidoc @@ -0,0 +1,34 @@ +:doctype: book + 
+include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +[[intro]] +== Welcome to Elastic serverless + +include::{docs-content-root}/serverless/pages/welcome-to-serverless.asciidoc[leveloffset=+2] + +include::./what-is-serverless.asciidoc[leveloffset=+2] + +include::./sign-up.asciidoc[leveloffset=+2] + +include::./manage-org.asciidoc[leveloffset=+2] +include::./manage-access-to-org.asciidoc[leveloffset=+3] +include::./manage-access-to-org-user-roles.asciidoc[leveloffset=+3] +include::./manage-access-to-org-from-existing-account.asciidoc[leveloffset=+3] + +include::./manage-your-project.asciidoc[leveloffset=+2] +include::./manage-your-project-rest-api.asciidoc[leveloffset=+3] + +include::./manage-billing.asciidoc[leveloffset=+2] +include::./manage-billing-check-subscription.asciidoc[leveloffset=+3] +include::./manage-billing-monitor-usage.asciidoc[leveloffset=+3] +include::./manage-billing-history.asciidoc[leveloffset=+3] +include::./manage-billing-pricing-model.asciidoc[leveloffset=+3] +include::./manage-billing-stop-project.asciidoc[leveloffset=+3] + +include::./service-status.asciidoc[leveloffset=+2] + +include::./user-profile.asciidoc[leveloffset=+2] + +include::./cloud-regions.asciidoc[leveloffset=+2] diff --git a/serverless/pages/project-settings/index.asciidoc b/serverless/pages/project-settings/index.asciidoc index 7f6ffb158b..c3890f0702 100644 --- a/serverless/pages/project-settings/index.asciidoc +++ b/serverless/pages/project-settings/index.asciidoc @@ -3,27 +3,30 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] include::{docs-root}/shared/attributes.asciidoc[] -= Project and management settings +[[project-and-management-settings]] +== Project and management settings -include::./project-settings.asciidoc[leveloffset=+1] -include::./api-keys.asciidoc[leveloffset=+2] -include::./action-connectors.asciidoc[leveloffset=+2] 
-include::./custom-roles.asciidoc[leveloffset=+2] -include::./data-views.asciidoc[leveloffset=+2] -include::./files.asciidoc[leveloffset=+2] -include::./index-management.asciidoc[leveloffset=+2] -include::./ingest-pipelines.asciidoc[leveloffset=+2] -include::./logstash-pipelines.asciidoc[leveloffset=+2] -include::./machine-learning.asciidoc[leveloffset=+2] -include::./maintenance-windows.asciidoc[leveloffset=+2] -include::./maps.asciidoc[leveloffset=+2] -include::./reports.asciidoc[leveloffset=+2] -include::./rules.asciidoc[leveloffset=+2] -include::./saved-objects.asciidoc[leveloffset=+2] -include::./spaces.asciidoc[leveloffset=+2] -include::./tags.asciidoc[leveloffset=+2] -include::./transforms.asciidoc[leveloffset=+2] +include::./project-and-management-settings.asciidoc[leveloffset=+2] -include::./integrations.asciidoc[leveloffset=+1] +include::./project-settings.asciidoc[leveloffset=+2] +include::./api-keys.asciidoc[leveloffset=+3] +include::./action-connectors.asciidoc[leveloffset=+3] +include::./custom-roles.asciidoc[leveloffset=+3] +include::./data-views.asciidoc[leveloffset=+3] +include::./files.asciidoc[leveloffset=+3] +include::./index-management.asciidoc[leveloffset=+3] +include::./ingest-pipelines.asciidoc[leveloffset=+3] +include::./logstash-pipelines.asciidoc[leveloffset=+3] +include::./machine-learning.asciidoc[leveloffset=+3] +include::./maintenance-windows.asciidoc[leveloffset=+3] +include::./maps.asciidoc[leveloffset=+3] +include::./reports.asciidoc[leveloffset=+3] +include::./rules.asciidoc[leveloffset=+3] +include::./saved-objects.asciidoc[leveloffset=+3] +include::./spaces.asciidoc[leveloffset=+3] +include::./tags.asciidoc[leveloffset=+3] +include::./transforms.asciidoc[leveloffset=+3] -include::./fleet-and-elastic-agent.asciidoc[leveloffset=+1] +include::./integrations.asciidoc[leveloffset=+2] + +include::./fleet-and-elastic-agent.asciidoc[leveloffset=+2] From 5b5f6749ce7f9fd0a5c723bfb573d2f07fd5068a Mon Sep 17 00:00:00 2001 From: Colleen 
McGinnis Date: Mon, 4 Nov 2024 13:41:00 -0600 Subject: [PATCH 20/25] update readme --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fcd0a31a31..3f13402ecd 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # docs-content -This repo contains most of our Elastic Serverless documentation, including: +This repo contains most of our Elastic Serverless documentation, including: - [Overview content](https://www.elastic.co/docs/current/serverless) - [Elasticsearch](https://www.elastic.co/docs/current/serverless/elasticsearch/what-is-elasticsearch-serverless) @@ -8,7 +8,7 @@ This repo contains most of our Elastic Serverless documentation, including: - [Project and management settings](https://www.elastic.co/docs/current/serverless/project-and-management-settings) > Not the docs you're looking for? Try the following: -> +> > - For Elastic Observability, visit [elastic/observability-docs](https://github.com/elastic/observability-docs). > - For Elastic Security, visit [elastic/security-docs](https://github.com/elastic/security-docs). > - For all other documentation, click the **✏️ edit** button on any page to jump to its source in GitHub. @@ -21,7 +21,7 @@ All documentation pull requests automatically add the [@platform-docs](https://g If you find any bugs in our documentation, or want to request an enhancement, then you can open an issue using our template. We also welcome contributions in the form of PRs. Before you submit a PR, make sure that you have signed our [Contributor License Agreement](https://www.elastic.co/contributor-agreement/). -This documentation uses a custom syntax written in [MDX](https://mdxjs.com/). In many cases, you only need to know plain markdown to contribute. We'll add a public component reference and additional contribution guidelines in future. Elasticians can refer to our [internal syntax reference](https://docs.elastic.dev/docsmobile/syntax). 
+This documentation uses [AsciiDoc](https://github.com/elastic/docs?tab=readme-ov-file#asciidoc-guide). ## Preview the docs From 4cf695e415f44a2fcd66611469aece9d32fb8446 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 15:01:06 -0600 Subject: [PATCH 21/25] flatten into one pages directory again --- serverless/index-serverless-devtools.asciidoc | 19 ++++++ .../index-serverless-elasticsearch.asciidoc | 58 +++++++++++++++++++ serverless/index-serverless-general.asciidoc | 34 +++++++++++ ...index-serverless-project-settings.asciidoc | 32 ++++++++++ serverless/index.asciidoc | 18 +++--- .../action-connectors.asciidoc | 0 .../action-connectors.mdx | 0 .../{project-settings => }/api-keys.asciidoc | 0 .../pages/{project-settings => }/api-keys.mdx | 0 .../apis-elasticsearch-conventions.asciidoc | 0 .../apis-elasticsearch-conventions.mdx | 0 .../apis-http-apis.asciidoc | 0 .../{elasticsearch => }/apis-http-apis.mdx | 0 .../apis-kibana-conventions.asciidoc | 0 .../apis-kibana-conventions.mdx | 0 .../clients-dot-net-getting-started.asciidoc | 0 .../clients-dot-net-getting-started.mdx | 0 .../clients-go-getting-started.asciidoc | 0 .../clients-go-getting-started.mdx | 0 .../clients-java-getting-started.asciidoc | 0 .../clients-java-getting-started.mdx | 0 .../clients-nodejs-getting-started.asciidoc | 0 .../clients-nodejs-getting-started.mdx | 0 .../clients-php-getting-started.asciidoc | 0 .../clients-php-getting-started.mdx | 0 .../clients-python-getting-started.asciidoc | 0 .../clients-python-getting-started.mdx | 0 .../clients-ruby-getting-started.asciidoc | 0 .../clients-ruby-getting-started.mdx | 0 .../{elasticsearch => }/clients.asciidoc | 0 .../pages/{elasticsearch => }/clients.mdx | 0 .../{general => }/cloud-regions.asciidoc | 0 .../pages/{general => }/cloud-regions.mdx | 0 .../custom-roles.asciidoc | 0 .../{project-settings => }/custom-roles.mdx | 0 .../data-views.asciidoc | 0 .../{project-settings => }/data-views.mdx | 0 
.../debug-grok-expressions.asciidoc | 0 .../{devtools => }/debug-grok-expressions.mdx | 0 .../debug-painless-scripts.asciidoc | 0 .../{devtools => }/debug-painless-scripts.mdx | 0 .../developer-tools-troubleshooting.asciidoc | 0 .../developer-tools-troubleshooting.mdx | 0 serverless/pages/devtools/index.asciidoc | 19 ------ .../elasticsearch-developer-tools.asciidoc | 0 .../elasticsearch-developer-tools.mdx | 0 serverless/pages/elasticsearch/index.asciidoc | 58 ------------------- .../explore-your-data-alerting.asciidoc | 0 .../explore-your-data-alerting.mdx | 0 ...lore-your-data-discover-your-data.asciidoc | 0 .../explore-your-data-discover-your-data.mdx | 0 ...re-your-data-the-aggregations-api.asciidoc | 0 ...explore-your-data-the-aggregations-api.mdx | 0 ...alize-your-data-create-dashboards.asciidoc | 0 ...-visualize-your-data-create-dashboards.mdx | 0 ...e-your-data-create-visualizations.asciidoc | 0 ...ualize-your-data-create-visualizations.mdx | 0 ...ore-your-data-visualize-your-data.asciidoc | 0 .../explore-your-data-visualize-your-data.mdx | 0 .../explore-your-data.asciidoc | 0 .../{elasticsearch => }/explore-your-data.mdx | 0 .../{project-settings => }/files.asciidoc | 0 .../pages/{project-settings => }/files.mdx | 0 .../fleet-and-elastic-agent.asciidoc | 0 .../fleet-and-elastic-agent.mdx | 0 .../general-developer-tools.asciidoc | 0 .../general-developer-tools.mdx | 0 serverless/pages/general/index.asciidoc | 34 ----------- .../{elasticsearch => }/get-started.asciidoc | 2 +- .../pages/{elasticsearch => }/get-started.mdx | 0 ...data-ml-nlp-deploy-trained-models.asciidoc | 8 +-- ...explore-your-data-ml-nlp-examples.asciidoc | 4 +- ...plore-your-data-ml-nlp-lang-ident.asciidoc | 2 +- ...xplore-your-data-ml-nlp-model-ref.asciidoc | 2 +- ...lore-your-data-ml-nlp-ootb-models.asciidoc | 4 +- ...ore-your-data-ml-nlp-select-model.asciidoc | 6 +- ...e-your-data-ml-nlp-test-inference.asciidoc | 2 +- .../hidden/explore-your-data-ml-nlp.asciidoc | 8 +-- 
.../index-management.asciidoc | 0 .../index-management.mdx | 0 .../ingest-pipelines.asciidoc | 0 .../ingest-pipelines.mdx | 0 ...your-data-ingest-data-through-api.asciidoc | 0 ...gest-your-data-ingest-data-through-api.mdx | 0 ...t-data-through-integrations-beats.asciidoc | 0 ...ingest-data-through-integrations-beats.mdx | 0 ...ugh-integrations-connector-client.asciidoc | 0 ...-through-integrations-connector-client.mdx | 0 ...ata-through-integrations-logstash.asciidoc | 0 ...est-data-through-integrations-logstash.mdx | 0 .../ingest-your-data-upload-file.asciidoc | 0 .../ingest-your-data-upload-file.mdx | 0 .../ingest-your-data.asciidoc | 0 .../{elasticsearch => }/ingest-your-data.mdx | 0 .../integrations.asciidoc | 0 .../{project-settings => }/integrations.mdx | 0 .../{elasticsearch => }/knn-search.asciidoc | 0 .../pages/{elasticsearch => }/knn-search.mdx | 0 .../logstash-pipelines.asciidoc | 0 .../logstash-pipelines.mdx | 0 .../machine-learning.asciidoc | 0 .../machine-learning.mdx | 0 .../maintenance-windows.asciidoc | 0 .../maintenance-windows.mdx | 0 ...cess-to-org-from-existing-account.asciidoc | 0 ...ge-access-to-org-from-existing-account.mdx | 0 .../manage-access-to-org-user-roles.asciidoc | 0 .../manage-access-to-org-user-roles.mdx | 0 .../manage-access-to-org.asciidoc | 0 .../{general => }/manage-access-to-org.mdx | 0 ...manage-billing-check-subscription.asciidoc | 0 .../manage-billing-check-subscription.mdx | 0 .../manage-billing-history.asciidoc | 0 .../{general => }/manage-billing-history.mdx | 0 .../manage-billing-monitor-usage.asciidoc | 0 .../manage-billing-monitor-usage.mdx | 0 .../manage-billing-pricing-model.asciidoc | 0 .../manage-billing-pricing-model.mdx | 0 .../manage-billing-stop-project.asciidoc | 0 .../manage-billing-stop-project.mdx | 0 .../{general => }/manage-billing.asciidoc | 0 .../pages/{general => }/manage-billing.mdx | 0 .../pages/{general => }/manage-org.asciidoc | 0 serverless/pages/{general => }/manage-org.mdx | 0 
.../manage-your-project-rest-api.asciidoc | 0 .../manage-your-project-rest-api.mdx | 0 .../manage-your-project.asciidoc | 0 .../{general => }/manage-your-project.mdx | 0 .../{project-settings => }/maps.asciidoc | 0 .../pages/{project-settings => }/maps.mdx | 0 .../{elasticsearch => }/pricing.asciidoc | 2 +- .../pages/{elasticsearch => }/pricing.mdx | 0 .../profile-queries-and-aggregations.asciidoc | 0 .../profile-queries-and-aggregations.mdx | 0 .../project-and-management-settings.asciidoc | 0 .../project-and-management-settings.mdx | 0 .../project-settings.asciidoc | 0 .../project-settings.mdx | 0 .../pages/project-settings/index.asciidoc | 32 ---------- .../{project-settings => }/reports.asciidoc | 0 .../pages/{project-settings => }/reports.mdx | 0 .../{project-settings => }/rules.asciidoc | 0 .../pages/{project-settings => }/rules.mdx | 0 .../run-api-requests-in-the-console.asciidoc | 0 .../run-api-requests-in-the-console.mdx | 0 .../saved-objects.asciidoc | 0 .../{project-settings => }/saved-objects.mdx | 0 .../search-playground.asciidoc | 0 .../{elasticsearch => }/search-playground.mdx | 0 .../search-with-synonyms.asciidoc | 0 .../search-with-synonyms.mdx | 0 ...h-your-data-semantic-search-elser.asciidoc | 0 ...search-your-data-semantic-search-elser.mdx | 0 .../search-your-data-semantic-search.asciidoc | 10 ++-- .../search-your-data-semantic-search.mdx | 0 .../search-your-data-the-search-api.asciidoc | 0 .../search-your-data-the-search-api.mdx | 0 .../search-your-data.asciidoc | 0 .../{elasticsearch => }/search-your-data.mdx | 0 .../serverless-differences.asciidoc | 0 .../serverless-differences.mdx | 0 .../{general => }/service-status.asciidoc | 0 .../pages/{general => }/service-status.mdx | 0 .../pages/{general => }/sign-up.asciidoc | 0 serverless/pages/{general => }/sign-up.mdx | 0 .../{project-settings => }/spaces.asciidoc | 0 .../pages/{project-settings => }/spaces.mdx | 0 .../{project-settings => }/tags.asciidoc | 0 .../pages/{project-settings => 
}/tags.mdx | 0 .../technical-preview-limitations.asciidoc | 0 .../technical-preview-limitations.mdx | 0 .../transforms.asciidoc | 0 .../{project-settings => }/transforms.mdx | 0 .../pages/{general => }/user-profile.asciidoc | 0 .../pages/{general => }/user-profile.mdx | 0 .../{general => }/visualize-library.asciidoc | 0 .../pages/{general => }/visualize-library.mdx | 0 .../what-is-elasticsearch-serverless.asciidoc | 0 .../what-is-elasticsearch-serverless.mdx | 0 .../{general => }/what-is-serverless.asciidoc | 0 .../{general => }/what-is-serverless.mdx | 0 .../partials/field-mappings-elser.asciidoc | 2 +- .../generate-embeddings-elser.asciidoc | 2 +- .../partials/search-dense-vector.asciidoc | 2 +- 184 files changed, 178 insertions(+), 182 deletions(-) create mode 100644 serverless/index-serverless-devtools.asciidoc create mode 100644 serverless/index-serverless-elasticsearch.asciidoc create mode 100644 serverless/index-serverless-general.asciidoc create mode 100644 serverless/index-serverless-project-settings.asciidoc rename serverless/pages/{project-settings => }/action-connectors.asciidoc (100%) rename serverless/pages/{project-settings => }/action-connectors.mdx (100%) rename serverless/pages/{project-settings => }/api-keys.asciidoc (100%) rename serverless/pages/{project-settings => }/api-keys.mdx (100%) rename serverless/pages/{elasticsearch => }/apis-elasticsearch-conventions.asciidoc (100%) rename serverless/pages/{elasticsearch => }/apis-elasticsearch-conventions.mdx (100%) rename serverless/pages/{elasticsearch => }/apis-http-apis.asciidoc (100%) rename serverless/pages/{elasticsearch => }/apis-http-apis.mdx (100%) rename serverless/pages/{elasticsearch => }/apis-kibana-conventions.asciidoc (100%) rename serverless/pages/{elasticsearch => }/apis-kibana-conventions.mdx (100%) rename serverless/pages/{elasticsearch => }/clients-dot-net-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-dot-net-getting-started.mdx (100%) 
rename serverless/pages/{elasticsearch => }/clients-go-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-go-getting-started.mdx (100%) rename serverless/pages/{elasticsearch => }/clients-java-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-java-getting-started.mdx (100%) rename serverless/pages/{elasticsearch => }/clients-nodejs-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-nodejs-getting-started.mdx (100%) rename serverless/pages/{elasticsearch => }/clients-php-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-php-getting-started.mdx (100%) rename serverless/pages/{elasticsearch => }/clients-python-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-python-getting-started.mdx (100%) rename serverless/pages/{elasticsearch => }/clients-ruby-getting-started.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients-ruby-getting-started.mdx (100%) rename serverless/pages/{elasticsearch => }/clients.asciidoc (100%) rename serverless/pages/{elasticsearch => }/clients.mdx (100%) rename serverless/pages/{general => }/cloud-regions.asciidoc (100%) rename serverless/pages/{general => }/cloud-regions.mdx (100%) rename serverless/pages/{project-settings => }/custom-roles.asciidoc (100%) rename serverless/pages/{project-settings => }/custom-roles.mdx (100%) rename serverless/pages/{project-settings => }/data-views.asciidoc (100%) rename serverless/pages/{project-settings => }/data-views.mdx (100%) rename serverless/pages/{devtools => }/debug-grok-expressions.asciidoc (100%) rename serverless/pages/{devtools => }/debug-grok-expressions.mdx (100%) rename serverless/pages/{devtools => }/debug-painless-scripts.asciidoc (100%) rename serverless/pages/{devtools => }/debug-painless-scripts.mdx (100%) rename serverless/pages/{devtools => }/developer-tools-troubleshooting.asciidoc (100%) rename 
serverless/pages/{devtools => }/developer-tools-troubleshooting.mdx (100%) delete mode 100644 serverless/pages/devtools/index.asciidoc rename serverless/pages/{elasticsearch => }/elasticsearch-developer-tools.asciidoc (100%) rename serverless/pages/{elasticsearch => }/elasticsearch-developer-tools.mdx (100%) delete mode 100644 serverless/pages/elasticsearch/index.asciidoc rename serverless/pages/{elasticsearch => }/explore-your-data-alerting.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-alerting.mdx (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-discover-your-data.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-discover-your-data.mdx (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-the-aggregations-api.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-the-aggregations-api.mdx (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-visualize-your-data-create-dashboards.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-visualize-your-data-create-dashboards.mdx (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-visualize-your-data-create-visualizations.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-visualize-your-data-create-visualizations.mdx (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-visualize-your-data.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data-visualize-your-data.mdx (100%) rename serverless/pages/{elasticsearch => }/explore-your-data.asciidoc (100%) rename serverless/pages/{elasticsearch => }/explore-your-data.mdx (100%) rename serverless/pages/{project-settings => }/files.asciidoc (100%) rename serverless/pages/{project-settings => }/files.mdx (100%) rename serverless/pages/{project-settings => }/fleet-and-elastic-agent.asciidoc (100%) rename serverless/pages/{project-settings => 
}/fleet-and-elastic-agent.mdx (100%) rename serverless/pages/{devtools => }/general-developer-tools.asciidoc (100%) rename serverless/pages/{devtools => }/general-developer-tools.mdx (100%) delete mode 100644 serverless/pages/general/index.asciidoc rename serverless/pages/{elasticsearch => }/get-started.asciidoc (99%) rename serverless/pages/{elasticsearch => }/get-started.mdx (100%) rename serverless/pages/{project-settings => }/index-management.asciidoc (100%) rename serverless/pages/{project-settings => }/index-management.mdx (100%) rename serverless/pages/{project-settings => }/ingest-pipelines.asciidoc (100%) rename serverless/pages/{project-settings => }/ingest-pipelines.mdx (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-api.asciidoc (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-api.mdx (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-integrations-beats.asciidoc (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-integrations-beats.mdx (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-integrations-connector-client.mdx (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-ingest-data-through-integrations-logstash.mdx (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-upload-file.asciidoc (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data-upload-file.mdx (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data.asciidoc (100%) rename serverless/pages/{elasticsearch => }/ingest-your-data.mdx (100%) rename serverless/pages/{project-settings => }/integrations.asciidoc 
(100%) rename serverless/pages/{project-settings => }/integrations.mdx (100%) rename serverless/pages/{elasticsearch => }/knn-search.asciidoc (100%) rename serverless/pages/{elasticsearch => }/knn-search.mdx (100%) rename serverless/pages/{project-settings => }/logstash-pipelines.asciidoc (100%) rename serverless/pages/{project-settings => }/logstash-pipelines.mdx (100%) rename serverless/pages/{project-settings => }/machine-learning.asciidoc (100%) rename serverless/pages/{project-settings => }/machine-learning.mdx (100%) rename serverless/pages/{project-settings => }/maintenance-windows.asciidoc (100%) rename serverless/pages/{project-settings => }/maintenance-windows.mdx (100%) rename serverless/pages/{general => }/manage-access-to-org-from-existing-account.asciidoc (100%) rename serverless/pages/{general => }/manage-access-to-org-from-existing-account.mdx (100%) rename serverless/pages/{general => }/manage-access-to-org-user-roles.asciidoc (100%) rename serverless/pages/{general => }/manage-access-to-org-user-roles.mdx (100%) rename serverless/pages/{general => }/manage-access-to-org.asciidoc (100%) rename serverless/pages/{general => }/manage-access-to-org.mdx (100%) rename serverless/pages/{general => }/manage-billing-check-subscription.asciidoc (100%) rename serverless/pages/{general => }/manage-billing-check-subscription.mdx (100%) rename serverless/pages/{general => }/manage-billing-history.asciidoc (100%) rename serverless/pages/{general => }/manage-billing-history.mdx (100%) rename serverless/pages/{general => }/manage-billing-monitor-usage.asciidoc (100%) rename serverless/pages/{general => }/manage-billing-monitor-usage.mdx (100%) rename serverless/pages/{general => }/manage-billing-pricing-model.asciidoc (100%) rename serverless/pages/{general => }/manage-billing-pricing-model.mdx (100%) rename serverless/pages/{general => }/manage-billing-stop-project.asciidoc (100%) rename serverless/pages/{general => }/manage-billing-stop-project.mdx (100%) rename 
serverless/pages/{general => }/manage-billing.asciidoc (100%) rename serverless/pages/{general => }/manage-billing.mdx (100%) rename serverless/pages/{general => }/manage-org.asciidoc (100%) rename serverless/pages/{general => }/manage-org.mdx (100%) rename serverless/pages/{general => }/manage-your-project-rest-api.asciidoc (100%) rename serverless/pages/{general => }/manage-your-project-rest-api.mdx (100%) rename serverless/pages/{general => }/manage-your-project.asciidoc (100%) rename serverless/pages/{general => }/manage-your-project.mdx (100%) rename serverless/pages/{project-settings => }/maps.asciidoc (100%) rename serverless/pages/{project-settings => }/maps.mdx (100%) rename serverless/pages/{elasticsearch => }/pricing.asciidoc (98%) rename serverless/pages/{elasticsearch => }/pricing.mdx (100%) rename serverless/pages/{devtools => }/profile-queries-and-aggregations.asciidoc (100%) rename serverless/pages/{devtools => }/profile-queries-and-aggregations.mdx (100%) rename serverless/pages/{project-settings => }/project-and-management-settings.asciidoc (100%) rename serverless/pages/{project-settings => }/project-and-management-settings.mdx (100%) rename serverless/pages/{project-settings => }/project-settings.asciidoc (100%) rename serverless/pages/{project-settings => }/project-settings.mdx (100%) delete mode 100644 serverless/pages/project-settings/index.asciidoc rename serverless/pages/{project-settings => }/reports.asciidoc (100%) rename serverless/pages/{project-settings => }/reports.mdx (100%) rename serverless/pages/{project-settings => }/rules.asciidoc (100%) rename serverless/pages/{project-settings => }/rules.mdx (100%) rename serverless/pages/{devtools => }/run-api-requests-in-the-console.asciidoc (100%) rename serverless/pages/{devtools => }/run-api-requests-in-the-console.mdx (100%) rename serverless/pages/{project-settings => }/saved-objects.asciidoc (100%) rename serverless/pages/{project-settings => }/saved-objects.mdx (100%) rename 
serverless/pages/{elasticsearch => }/search-playground.asciidoc (100%) rename serverless/pages/{elasticsearch => }/search-playground.mdx (100%) rename serverless/pages/{elasticsearch => }/search-with-synonyms.asciidoc (100%) rename serverless/pages/{elasticsearch => }/search-with-synonyms.mdx (100%) rename serverless/pages/{elasticsearch => }/search-your-data-semantic-search-elser.asciidoc (100%) rename serverless/pages/{elasticsearch => }/search-your-data-semantic-search-elser.mdx (100%) rename serverless/pages/{elasticsearch => }/search-your-data-semantic-search.asciidoc (95%) rename serverless/pages/{elasticsearch => }/search-your-data-semantic-search.mdx (100%) rename serverless/pages/{elasticsearch => }/search-your-data-the-search-api.asciidoc (100%) rename serverless/pages/{elasticsearch => }/search-your-data-the-search-api.mdx (100%) rename serverless/pages/{elasticsearch => }/search-your-data.asciidoc (100%) rename serverless/pages/{elasticsearch => }/search-your-data.mdx (100%) rename serverless/pages/{elasticsearch => }/serverless-differences.asciidoc (100%) rename serverless/pages/{elasticsearch => }/serverless-differences.mdx (100%) rename serverless/pages/{general => }/service-status.asciidoc (100%) rename serverless/pages/{general => }/service-status.mdx (100%) rename serverless/pages/{general => }/sign-up.asciidoc (100%) rename serverless/pages/{general => }/sign-up.mdx (100%) rename serverless/pages/{project-settings => }/spaces.asciidoc (100%) rename serverless/pages/{project-settings => }/spaces.mdx (100%) rename serverless/pages/{project-settings => }/tags.asciidoc (100%) rename serverless/pages/{project-settings => }/tags.mdx (100%) rename serverless/pages/{elasticsearch => }/technical-preview-limitations.asciidoc (100%) rename serverless/pages/{elasticsearch => }/technical-preview-limitations.mdx (100%) rename serverless/pages/{project-settings => }/transforms.asciidoc (100%) rename serverless/pages/{project-settings => }/transforms.mdx (100%) 
rename serverless/pages/{general => }/user-profile.asciidoc (100%) rename serverless/pages/{general => }/user-profile.mdx (100%) rename serverless/pages/{general => }/visualize-library.asciidoc (100%) rename serverless/pages/{general => }/visualize-library.mdx (100%) rename serverless/pages/{elasticsearch => }/what-is-elasticsearch-serverless.asciidoc (100%) rename serverless/pages/{elasticsearch => }/what-is-elasticsearch-serverless.mdx (100%) rename serverless/pages/{general => }/what-is-serverless.asciidoc (100%) rename serverless/pages/{general => }/what-is-serverless.mdx (100%) diff --git a/serverless/index-serverless-devtools.asciidoc b/serverless/index-serverless-devtools.asciidoc new file mode 100644 index 0000000000..f5aa095894 --- /dev/null +++ b/serverless/index-serverless-devtools.asciidoc @@ -0,0 +1,19 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +[[developer-tools]] +== Dev tools + +include::./pages/general-developer-tools.asciidoc[leveloffset=+2] + +include::./pages/run-api-requests-in-the-console.asciidoc[leveloffset=+2] + +include::./pages/profile-queries-and-aggregations.asciidoc[leveloffset=+2] + +include::./pages/debug-grok-expressions.asciidoc[leveloffset=+2] + +include::./pages/debug-painless-scripts.asciidoc[leveloffset=+2] + +include::./pages/developer-tools-troubleshooting.asciidoc[leveloffset=+2] diff --git a/serverless/index-serverless-elasticsearch.asciidoc b/serverless/index-serverless-elasticsearch.asciidoc new file mode 100644 index 0000000000..bcf3a7cc8e --- /dev/null +++ b/serverless/index-serverless-elasticsearch.asciidoc @@ -0,0 +1,58 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +[[what-is-elasticsearch-serverless]] +== Elasticsearch + +++++ +Elasticsearch +++++ + 
+include::./pages/what-is-elasticsearch-serverless.asciidoc[leveloffset=+2] + +include::./pages/pricing.asciidoc[leveloffset=+2] + +include::./pages/get-started.asciidoc[leveloffset=+2] + +include::./pages/clients.asciidoc[leveloffset=+2] +include::./pages/clients-go-getting-started.asciidoc[leveloffset=+3] +include::./pages/clients-java-getting-started.asciidoc[leveloffset=+3] +include::./pages/clients-dot-net-getting-started.asciidoc[leveloffset=+3] +include::./pages/clients-nodejs-getting-started.asciidoc[leveloffset=+3] +include::./pages/clients-php-getting-started.asciidoc[leveloffset=+3] +include::./pages/clients-python-getting-started.asciidoc[leveloffset=+3] +include::./pages/clients-ruby-getting-started.asciidoc[leveloffset=+3] + +include::./pages/apis-http-apis.asciidoc[leveloffset=+2] +include::./pages/apis-elasticsearch-conventions.asciidoc[leveloffset=+3] +include::./pages/apis-kibana-conventions.asciidoc[leveloffset=+3] + +include::./pages/elasticsearch-developer-tools.asciidoc[leveloffset=+2] + +include::./pages/ingest-your-data.asciidoc[leveloffset=+2] +include::./pages/ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+3] +include::./pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+3] +include::./pages/ingest-your-data-upload-file.asciidoc[leveloffset=+3] +include::./pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+3] +include::./pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+3] + +include::./pages/search-your-data.asciidoc[leveloffset=+2] +include::./pages/search-your-data-the-search-api.asciidoc[leveloffset=+3] +include::./pages/search-with-synonyms.asciidoc[leveloffset=+3] +include::./pages/knn-search.asciidoc[leveloffset=+3] +include::./pages/search-your-data-semantic-search.asciidoc[leveloffset=+3] +include::./pages/search-your-data-semantic-search-elser.asciidoc[leveloffset=+4] + 
+include::./pages/explore-your-data.asciidoc[leveloffset=+2] +include::./pages/explore-your-data-the-aggregations-api.asciidoc[leveloffset=+3] +include::./pages/explore-your-data-discover-your-data.asciidoc[leveloffset=+3] +include::./pages/explore-your-data-visualize-your-data.asciidoc[leveloffset=+3] +include::./pages/explore-your-data-alerting.asciidoc[leveloffset=+3] + +include::./pages/search-playground.asciidoc[leveloffset=+2] + +include::./pages/serverless-differences.asciidoc[leveloffset=+2] + +include::./pages/technical-preview-limitations.asciidoc[leveloffset=+2] diff --git a/serverless/index-serverless-general.asciidoc b/serverless/index-serverless-general.asciidoc new file mode 100644 index 0000000000..6faf611ec3 --- /dev/null +++ b/serverless/index-serverless-general.asciidoc @@ -0,0 +1,34 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +[[intro]] +== Welcome to Elastic serverless + +include::{docs-content-root}/serverless/pages/welcome-to-serverless.asciidoc[leveloffset=+2] + +include::./pages/what-is-serverless.asciidoc[leveloffset=+2] + +include::./pages/sign-up.asciidoc[leveloffset=+2] + +include::./pages/manage-org.asciidoc[leveloffset=+2] +include::./pages/manage-access-to-org.asciidoc[leveloffset=+3] +include::./pages/manage-access-to-org-user-roles.asciidoc[leveloffset=+3] +include::./pages/manage-access-to-org-from-existing-account.asciidoc[leveloffset=+3] + +include::./pages/manage-your-project.asciidoc[leveloffset=+2] +include::./pages/manage-your-project-rest-api.asciidoc[leveloffset=+3] + +include::./pages/manage-billing.asciidoc[leveloffset=+2] +include::./pages/manage-billing-check-subscription.asciidoc[leveloffset=+3] +include::./pages/manage-billing-monitor-usage.asciidoc[leveloffset=+3] +include::./pages/manage-billing-history.asciidoc[leveloffset=+3] +include::./pages/manage-billing-pricing-model.asciidoc[leveloffset=+3] 
+include::./pages/manage-billing-stop-project.asciidoc[leveloffset=+3] + +include::./pages/service-status.asciidoc[leveloffset=+2] + +include::./pages/user-profile.asciidoc[leveloffset=+2] + +include::./pages/cloud-regions.asciidoc[leveloffset=+2] diff --git a/serverless/index-serverless-project-settings.asciidoc b/serverless/index-serverless-project-settings.asciidoc new file mode 100644 index 0000000000..10caa90b9e --- /dev/null +++ b/serverless/index-serverless-project-settings.asciidoc @@ -0,0 +1,32 @@ +:doctype: book + +include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] +include::{docs-root}/shared/attributes.asciidoc[] + +[[project-and-management-settings]] +== Project and management settings + +include::./pages/project-and-management-settings.asciidoc[leveloffset=+2] + +include::./pages/project-settings.asciidoc[leveloffset=+2] +include::./pages/api-keys.asciidoc[leveloffset=+3] +include::./pages/action-connectors.asciidoc[leveloffset=+3] +include::./pages/custom-roles.asciidoc[leveloffset=+3] +include::./pages/data-views.asciidoc[leveloffset=+3] +include::./pages/files.asciidoc[leveloffset=+3] +include::./pages/index-management.asciidoc[leveloffset=+3] +include::./pages/ingest-pipelines.asciidoc[leveloffset=+3] +include::./pages/logstash-pipelines.asciidoc[leveloffset=+3] +include::./pages/machine-learning.asciidoc[leveloffset=+3] +include::./pages/maintenance-windows.asciidoc[leveloffset=+3] +include::./pages/maps.asciidoc[leveloffset=+3] +include::./pages/reports.asciidoc[leveloffset=+3] +include::./pages/rules.asciidoc[leveloffset=+3] +include::./pages/saved-objects.asciidoc[leveloffset=+3] +include::./pages/spaces.asciidoc[leveloffset=+3] +include::./pages/tags.asciidoc[leveloffset=+3] +include::./pages/transforms.asciidoc[leveloffset=+3] + +include::./pages/integrations.asciidoc[leveloffset=+2] + +include::./pages/fleet-and-elastic-agent.asciidoc[leveloffset=+2] diff --git a/serverless/index.asciidoc b/serverless/index.asciidoc index 
e4f40b14a9..97ac5031bf 100644 --- a/serverless/index.asciidoc +++ b/serverless/index.asciidoc @@ -3,12 +3,8 @@ include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] include::{asciidoc-dir}/../../shared/attributes.asciidoc[] -:general-serverless: {docs-content-root}/serverless/pages/general -:elasticsearch-serverless: {docs-content-root}/serverless/pages/elasticsearch :security-serverless: {security-docs-root}/docs/serverless :observability-serverless: {observability-docs-root}/docs/en/serverless -:devtools-serverless: {docs-content-root}/serverless/pages/devtools -:project-settings-serverless: {docs-content-root}/serverless/pages/project-settings :es-badge: <> :obs-badge: <> @@ -42,14 +38,14 @@ include::{asciidoc-dir}/../../shared/attributes.asciidoc[] = Serverless -include::{general-serverless}/index.asciidoc[] -include::{elasticsearch-serverless}/index.asciidoc[] +include::./index-serverless-general.asciidoc[] +include::./index-serverless-elasticsearch.asciidoc[] include::{observability-serverless}/index.asciidoc[] include::{security-serverless}/index.asciidoc[] -include::{devtools-serverless}/index.asciidoc[] -include::{project-settings-serverless}/index.asciidoc[] +include::./index-serverless-devtools.asciidoc[] +include::./index-serverless-project-settings.asciidoc[] // Hidden pages -include::{elasticsearch-serverless}/explore-your-data-visualize-your-data-create-dashboards.asciidoc[leveloffset=+1] -include::{elasticsearch-serverless}/explore-your-data-visualize-your-data-create-visualizations.asciidoc[leveloffset=+1] -include::{general-serverless}/visualize-library.asciidoc[leveloffset=+1] \ No newline at end of file +include::./pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc[leveloffset=+1] +include::./pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc[leveloffset=+1] +include::./pages/visualize-library.asciidoc[leveloffset=+1] \ No newline at end of file diff --git 
a/serverless/pages/project-settings/action-connectors.asciidoc b/serverless/pages/action-connectors.asciidoc similarity index 100% rename from serverless/pages/project-settings/action-connectors.asciidoc rename to serverless/pages/action-connectors.asciidoc diff --git a/serverless/pages/project-settings/action-connectors.mdx b/serverless/pages/action-connectors.mdx similarity index 100% rename from serverless/pages/project-settings/action-connectors.mdx rename to serverless/pages/action-connectors.mdx diff --git a/serverless/pages/project-settings/api-keys.asciidoc b/serverless/pages/api-keys.asciidoc similarity index 100% rename from serverless/pages/project-settings/api-keys.asciidoc rename to serverless/pages/api-keys.asciidoc diff --git a/serverless/pages/project-settings/api-keys.mdx b/serverless/pages/api-keys.mdx similarity index 100% rename from serverless/pages/project-settings/api-keys.mdx rename to serverless/pages/api-keys.mdx diff --git a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc b/serverless/pages/apis-elasticsearch-conventions.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/apis-elasticsearch-conventions.asciidoc rename to serverless/pages/apis-elasticsearch-conventions.asciidoc diff --git a/serverless/pages/elasticsearch/apis-elasticsearch-conventions.mdx b/serverless/pages/apis-elasticsearch-conventions.mdx similarity index 100% rename from serverless/pages/elasticsearch/apis-elasticsearch-conventions.mdx rename to serverless/pages/apis-elasticsearch-conventions.mdx diff --git a/serverless/pages/elasticsearch/apis-http-apis.asciidoc b/serverless/pages/apis-http-apis.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/apis-http-apis.asciidoc rename to serverless/pages/apis-http-apis.asciidoc diff --git a/serverless/pages/elasticsearch/apis-http-apis.mdx b/serverless/pages/apis-http-apis.mdx similarity index 100% rename from serverless/pages/elasticsearch/apis-http-apis.mdx 
rename to serverless/pages/apis-http-apis.mdx diff --git a/serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc b/serverless/pages/apis-kibana-conventions.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/apis-kibana-conventions.asciidoc rename to serverless/pages/apis-kibana-conventions.asciidoc diff --git a/serverless/pages/elasticsearch/apis-kibana-conventions.mdx b/serverless/pages/apis-kibana-conventions.mdx similarity index 100% rename from serverless/pages/elasticsearch/apis-kibana-conventions.mdx rename to serverless/pages/apis-kibana-conventions.mdx diff --git a/serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc b/serverless/pages/clients-dot-net-getting-started.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients-dot-net-getting-started.asciidoc rename to serverless/pages/clients-dot-net-getting-started.asciidoc diff --git a/serverless/pages/elasticsearch/clients-dot-net-getting-started.mdx b/serverless/pages/clients-dot-net-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-dot-net-getting-started.mdx rename to serverless/pages/clients-dot-net-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-go-getting-started.asciidoc b/serverless/pages/clients-go-getting-started.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients-go-getting-started.asciidoc rename to serverless/pages/clients-go-getting-started.asciidoc diff --git a/serverless/pages/elasticsearch/clients-go-getting-started.mdx b/serverless/pages/clients-go-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-go-getting-started.mdx rename to serverless/pages/clients-go-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-java-getting-started.asciidoc b/serverless/pages/clients-java-getting-started.asciidoc similarity index 100% rename from 
serverless/pages/elasticsearch/clients-java-getting-started.asciidoc rename to serverless/pages/clients-java-getting-started.asciidoc diff --git a/serverless/pages/elasticsearch/clients-java-getting-started.mdx b/serverless/pages/clients-java-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-java-getting-started.mdx rename to serverless/pages/clients-java-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc b/serverless/pages/clients-nodejs-getting-started.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients-nodejs-getting-started.asciidoc rename to serverless/pages/clients-nodejs-getting-started.asciidoc diff --git a/serverless/pages/elasticsearch/clients-nodejs-getting-started.mdx b/serverless/pages/clients-nodejs-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-nodejs-getting-started.mdx rename to serverless/pages/clients-nodejs-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-php-getting-started.asciidoc b/serverless/pages/clients-php-getting-started.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients-php-getting-started.asciidoc rename to serverless/pages/clients-php-getting-started.asciidoc diff --git a/serverless/pages/elasticsearch/clients-php-getting-started.mdx b/serverless/pages/clients-php-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-php-getting-started.mdx rename to serverless/pages/clients-php-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-python-getting-started.asciidoc b/serverless/pages/clients-python-getting-started.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients-python-getting-started.asciidoc rename to serverless/pages/clients-python-getting-started.asciidoc diff --git 
a/serverless/pages/elasticsearch/clients-python-getting-started.mdx b/serverless/pages/clients-python-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-python-getting-started.mdx rename to serverless/pages/clients-python-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc b/serverless/pages/clients-ruby-getting-started.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients-ruby-getting-started.asciidoc rename to serverless/pages/clients-ruby-getting-started.asciidoc diff --git a/serverless/pages/elasticsearch/clients-ruby-getting-started.mdx b/serverless/pages/clients-ruby-getting-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients-ruby-getting-started.mdx rename to serverless/pages/clients-ruby-getting-started.mdx diff --git a/serverless/pages/elasticsearch/clients.asciidoc b/serverless/pages/clients.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/clients.asciidoc rename to serverless/pages/clients.asciidoc diff --git a/serverless/pages/elasticsearch/clients.mdx b/serverless/pages/clients.mdx similarity index 100% rename from serverless/pages/elasticsearch/clients.mdx rename to serverless/pages/clients.mdx diff --git a/serverless/pages/general/cloud-regions.asciidoc b/serverless/pages/cloud-regions.asciidoc similarity index 100% rename from serverless/pages/general/cloud-regions.asciidoc rename to serverless/pages/cloud-regions.asciidoc diff --git a/serverless/pages/general/cloud-regions.mdx b/serverless/pages/cloud-regions.mdx similarity index 100% rename from serverless/pages/general/cloud-regions.mdx rename to serverless/pages/cloud-regions.mdx diff --git a/serverless/pages/project-settings/custom-roles.asciidoc b/serverless/pages/custom-roles.asciidoc similarity index 100% rename from serverless/pages/project-settings/custom-roles.asciidoc rename to 
serverless/pages/custom-roles.asciidoc diff --git a/serverless/pages/project-settings/custom-roles.mdx b/serverless/pages/custom-roles.mdx similarity index 100% rename from serverless/pages/project-settings/custom-roles.mdx rename to serverless/pages/custom-roles.mdx diff --git a/serverless/pages/project-settings/data-views.asciidoc b/serverless/pages/data-views.asciidoc similarity index 100% rename from serverless/pages/project-settings/data-views.asciidoc rename to serverless/pages/data-views.asciidoc diff --git a/serverless/pages/project-settings/data-views.mdx b/serverless/pages/data-views.mdx similarity index 100% rename from serverless/pages/project-settings/data-views.mdx rename to serverless/pages/data-views.mdx diff --git a/serverless/pages/devtools/debug-grok-expressions.asciidoc b/serverless/pages/debug-grok-expressions.asciidoc similarity index 100% rename from serverless/pages/devtools/debug-grok-expressions.asciidoc rename to serverless/pages/debug-grok-expressions.asciidoc diff --git a/serverless/pages/devtools/debug-grok-expressions.mdx b/serverless/pages/debug-grok-expressions.mdx similarity index 100% rename from serverless/pages/devtools/debug-grok-expressions.mdx rename to serverless/pages/debug-grok-expressions.mdx diff --git a/serverless/pages/devtools/debug-painless-scripts.asciidoc b/serverless/pages/debug-painless-scripts.asciidoc similarity index 100% rename from serverless/pages/devtools/debug-painless-scripts.asciidoc rename to serverless/pages/debug-painless-scripts.asciidoc diff --git a/serverless/pages/devtools/debug-painless-scripts.mdx b/serverless/pages/debug-painless-scripts.mdx similarity index 100% rename from serverless/pages/devtools/debug-painless-scripts.mdx rename to serverless/pages/debug-painless-scripts.mdx diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.asciidoc b/serverless/pages/developer-tools-troubleshooting.asciidoc similarity index 100% rename from 
serverless/pages/devtools/developer-tools-troubleshooting.asciidoc rename to serverless/pages/developer-tools-troubleshooting.asciidoc diff --git a/serverless/pages/devtools/developer-tools-troubleshooting.mdx b/serverless/pages/developer-tools-troubleshooting.mdx similarity index 100% rename from serverless/pages/devtools/developer-tools-troubleshooting.mdx rename to serverless/pages/developer-tools-troubleshooting.mdx diff --git a/serverless/pages/devtools/index.asciidoc b/serverless/pages/devtools/index.asciidoc deleted file mode 100644 index af192f9c03..0000000000 --- a/serverless/pages/devtools/index.asciidoc +++ /dev/null @@ -1,19 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -[[developer-tools]] -== Dev tools - -include::./general-developer-tools.asciidoc[leveloffset=+2] - -include::./run-api-requests-in-the-console.asciidoc[leveloffset=+2] - -include::./profile-queries-and-aggregations.asciidoc[leveloffset=+2] - -include::./debug-grok-expressions.asciidoc[leveloffset=+2] - -include::./debug-painless-scripts.asciidoc[leveloffset=+2] - -include::./developer-tools-troubleshooting.asciidoc[leveloffset=+2] diff --git a/serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc b/serverless/pages/elasticsearch-developer-tools.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/elasticsearch-developer-tools.asciidoc rename to serverless/pages/elasticsearch-developer-tools.asciidoc diff --git a/serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx b/serverless/pages/elasticsearch-developer-tools.mdx similarity index 100% rename from serverless/pages/elasticsearch/elasticsearch-developer-tools.mdx rename to serverless/pages/elasticsearch-developer-tools.mdx diff --git a/serverless/pages/elasticsearch/index.asciidoc b/serverless/pages/elasticsearch/index.asciidoc deleted file mode 100644 index 478fff6208..0000000000 --- 
a/serverless/pages/elasticsearch/index.asciidoc +++ /dev/null @@ -1,58 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -[[what-is-elasticsearch-serverless]] -== Elasticsearch - -++++ -Elasticsearch -++++ - -include::./what-is-elasticsearch-serverless.asciidoc[leveloffset=+2] - -include::./pricing.asciidoc[leveloffset=+2] - -include::./get-started.asciidoc[leveloffset=+2] - -include::./clients.asciidoc[leveloffset=+2] -include::./clients-go-getting-started.asciidoc[leveloffset=+3] -include::./clients-java-getting-started.asciidoc[leveloffset=+3] -include::./clients-dot-net-getting-started.asciidoc[leveloffset=+3] -include::./clients-nodejs-getting-started.asciidoc[leveloffset=+3] -include::./clients-php-getting-started.asciidoc[leveloffset=+3] -include::./clients-python-getting-started.asciidoc[leveloffset=+3] -include::./clients-ruby-getting-started.asciidoc[leveloffset=+3] - -include::./apis-http-apis.asciidoc[leveloffset=+2] -include::./apis-elasticsearch-conventions.asciidoc[leveloffset=+3] -include::./apis-kibana-conventions.asciidoc[leveloffset=+3] - -include::./elasticsearch-developer-tools.asciidoc[leveloffset=+2] - -include::./ingest-your-data.asciidoc[leveloffset=+2] -include::./ingest-your-data-ingest-data-through-api.asciidoc[leveloffset=+3] -include::./ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc[leveloffset=+3] -include::./ingest-your-data-upload-file.asciidoc[leveloffset=+3] -include::./ingest-your-data-ingest-data-through-integrations-logstash.asciidoc[leveloffset=+3] -include::./ingest-your-data-ingest-data-through-integrations-beats.asciidoc[leveloffset=+3] - -include::./search-your-data.asciidoc[leveloffset=+2] -include::./search-your-data-the-search-api.asciidoc[leveloffset=+3] -include::./search-with-synonyms.asciidoc[leveloffset=+3] -include::./knn-search.asciidoc[leveloffset=+3] 
-include::./search-your-data-semantic-search.asciidoc[leveloffset=+3] -include::./search-your-data-semantic-search-elser.asciidoc[leveloffset=+4] - -include::./explore-your-data.asciidoc[leveloffset=+2] -include::./explore-your-data-the-aggregations-api.asciidoc[leveloffset=+3] -include::./explore-your-data-discover-your-data.asciidoc[leveloffset=+3] -include::./explore-your-data-visualize-your-data.asciidoc[leveloffset=+3] -include::./explore-your-data-alerting.asciidoc[leveloffset=+3] - -include::./search-playground.asciidoc[leveloffset=+2] - -include::./serverless-differences.asciidoc[leveloffset=+2] - -include::./technical-preview-limitations.asciidoc[leveloffset=+2] diff --git a/serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc b/serverless/pages/explore-your-data-alerting.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-alerting.asciidoc rename to serverless/pages/explore-your-data-alerting.asciidoc diff --git a/serverless/pages/elasticsearch/explore-your-data-alerting.mdx b/serverless/pages/explore-your-data-alerting.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-alerting.mdx rename to serverless/pages/explore-your-data-alerting.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc b/serverless/pages/explore-your-data-discover-your-data.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-discover-your-data.asciidoc rename to serverless/pages/explore-your-data-discover-your-data.asciidoc diff --git a/serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx b/serverless/pages/explore-your-data-discover-your-data.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-discover-your-data.mdx rename to serverless/pages/explore-your-data-discover-your-data.mdx diff --git 
a/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc b/serverless/pages/explore-your-data-the-aggregations-api.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.asciidoc rename to serverless/pages/explore-your-data-the-aggregations-api.asciidoc diff --git a/serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.mdx b/serverless/pages/explore-your-data-the-aggregations-api.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-the-aggregations-api.mdx rename to serverless/pages/explore-your-data-the-aggregations-api.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc b/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.asciidoc rename to serverless/pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.mdx b/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-dashboards.mdx rename to serverless/pages/explore-your-data-visualize-your-data-create-dashboards.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc b/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.asciidoc rename to serverless/pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc diff --git 
a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx b/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-visualize-your-data-create-visualizations.mdx rename to serverless/pages/explore-your-data-visualize-your-data-create-visualizations.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc b/serverless/pages/explore-your-data-visualize-your-data.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-visualize-your-data.asciidoc rename to serverless/pages/explore-your-data-visualize-your-data.asciidoc diff --git a/serverless/pages/elasticsearch/explore-your-data-visualize-your-data.mdx b/serverless/pages/explore-your-data-visualize-your-data.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data-visualize-your-data.mdx rename to serverless/pages/explore-your-data-visualize-your-data.mdx diff --git a/serverless/pages/elasticsearch/explore-your-data.asciidoc b/serverless/pages/explore-your-data.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data.asciidoc rename to serverless/pages/explore-your-data.asciidoc diff --git a/serverless/pages/elasticsearch/explore-your-data.mdx b/serverless/pages/explore-your-data.mdx similarity index 100% rename from serverless/pages/elasticsearch/explore-your-data.mdx rename to serverless/pages/explore-your-data.mdx diff --git a/serverless/pages/project-settings/files.asciidoc b/serverless/pages/files.asciidoc similarity index 100% rename from serverless/pages/project-settings/files.asciidoc rename to serverless/pages/files.asciidoc diff --git a/serverless/pages/project-settings/files.mdx b/serverless/pages/files.mdx similarity index 100% rename from serverless/pages/project-settings/files.mdx rename to serverless/pages/files.mdx diff 
--git a/serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc b/serverless/pages/fleet-and-elastic-agent.asciidoc similarity index 100% rename from serverless/pages/project-settings/fleet-and-elastic-agent.asciidoc rename to serverless/pages/fleet-and-elastic-agent.asciidoc diff --git a/serverless/pages/project-settings/fleet-and-elastic-agent.mdx b/serverless/pages/fleet-and-elastic-agent.mdx similarity index 100% rename from serverless/pages/project-settings/fleet-and-elastic-agent.mdx rename to serverless/pages/fleet-and-elastic-agent.mdx diff --git a/serverless/pages/devtools/general-developer-tools.asciidoc b/serverless/pages/general-developer-tools.asciidoc similarity index 100% rename from serverless/pages/devtools/general-developer-tools.asciidoc rename to serverless/pages/general-developer-tools.asciidoc diff --git a/serverless/pages/devtools/general-developer-tools.mdx b/serverless/pages/general-developer-tools.mdx similarity index 100% rename from serverless/pages/devtools/general-developer-tools.mdx rename to serverless/pages/general-developer-tools.mdx diff --git a/serverless/pages/general/index.asciidoc b/serverless/pages/general/index.asciidoc deleted file mode 100644 index 7814def389..0000000000 --- a/serverless/pages/general/index.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -[[intro]] -== Welcome to Elastic serverless - -include::{docs-content-root}/serverless/pages/welcome-to-serverless.asciidoc[leveloffset=+2] - -include::./what-is-serverless.asciidoc[leveloffset=+2] - -include::./sign-up.asciidoc[leveloffset=+2] - -include::./manage-org.asciidoc[leveloffset=+2] -include::./manage-access-to-org.asciidoc[leveloffset=+3] -include::./manage-access-to-org-user-roles.asciidoc[leveloffset=+3] -include::./manage-access-to-org-from-existing-account.asciidoc[leveloffset=+3] - 
-include::./manage-your-project.asciidoc[leveloffset=+2] -include::./manage-your-project-rest-api.asciidoc[leveloffset=+3] - -include::./manage-billing.asciidoc[leveloffset=+2] -include::./manage-billing-check-subscription.asciidoc[leveloffset=+3] -include::./manage-billing-monitor-usage.asciidoc[leveloffset=+3] -include::./manage-billing-history.asciidoc[leveloffset=+3] -include::./manage-billing-pricing-model.asciidoc[leveloffset=+3] -include::./manage-billing-stop-project.asciidoc[leveloffset=+3] - -include::./service-status.asciidoc[leveloffset=+2] - -include::./user-profile.asciidoc[leveloffset=+2] - -include::./cloud-regions.asciidoc[leveloffset=+2] diff --git a/serverless/pages/elasticsearch/get-started.asciidoc b/serverless/pages/get-started.asciidoc similarity index 99% rename from serverless/pages/elasticsearch/get-started.asciidoc rename to serverless/pages/get-started.asciidoc index 6b176b53a1..7433ab7ed8 100644 --- a/serverless/pages/elasticsearch/get-started.asciidoc +++ b/serverless/pages/get-started.asciidoc @@ -28,7 +28,7 @@ Select **Create project** to continue. You should now see **Get started with {es}**, and you're ready to continue. -include::../../partials/minimum-vcus-detail.asciidoc[] +include::../partials/minimum-vcus-detail.asciidoc[] [discrete] [[elasticsearch-get-started-create-api-key]] diff --git a/serverless/pages/elasticsearch/get-started.mdx b/serverless/pages/get-started.mdx similarity index 100% rename from serverless/pages/elasticsearch/get-started.mdx rename to serverless/pages/get-started.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc index 2ab65e9039..dc7b548682 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc @@ -10,7 +10,7 @@ appropriate trained model. 
There is tooling support in https://github.com/elastic/eland[Eland] and {kib} to help you prepare and manage models. -. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/select-model[Select a trained model]. -. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/import-model[Import the trained model and vocabulary]. -. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/deploy-model[Deploy the model in your cluster]. -. https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/deploy-trained-models/try-it-out[Try it out]. +. <>. +. <>. +. <>. +. <>. diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc index edc2621c57..8ee76afffb 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc @@ -7,5 +7,5 @@ preview:[] The following pages contain end-to-end examples of how to use the different {nlp} tasks in the {stack}. 
-* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/examples/ner[How to deploy named entity recognition] -* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/examples/text-embedding-vector-search[How to deploy a text embedding model and use it for semantic search] +* <> +* <> diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc index 8436db9a89..a92e421baa 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc @@ -10,7 +10,7 @@ preview:[] A {lang-ident} model is provided in your cluster, which you can use in an {infer} processor of an ingest pipeline by using its model ID (`lang_ident_model_1`). For an example, refer to -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[Add NLP {infer} to ingest pipelines]. +<>. The longer the text passed into the {lang-ident} model, the more accurately the model can identify the language. It is fairly accurate on short samples (for diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc index ec78406ca9..3832421e5b 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc @@ -31,7 +31,7 @@ the way described, or at all. These models are listed by NLP task; for more information about those tasks, refer to -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp[Overview]. +<>. **Models highlighted in bold** in the list below are recommended for evaluation purposes and to get started with the Elastic {nlp} features. 
diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc index 07ca6c61b0..f92145c1e3 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc @@ -8,5 +8,5 @@ preview:[] You can use models that are trained and provided by Elastic that are available within the {stack} with a click of a button. -* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/elastic-models/elser[ELSER – Elastic Learned Sparse EncodeR] -* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/ootb-models/lang-ident[Language identification] +* <> +* <> diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc index 6e34448682..feaa640a42 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc @@ -5,7 +5,7 @@ preview:[] Per the -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp[Overview], +<>, there are multiple ways that you can use NLP features within the {stack}. After you determine which type of NLP task you want to perform, you must choose an appropriate trained model. @@ -16,9 +16,9 @@ data sets available for specific NLP tasks on https://huggingface.co/models[Hugging Face]. These instructions assume you're using one of those models and do not describe how to create new models. For the current list of supported model architectures, refer to -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/model-reference[Compatible third party NLP models]. +<>. 
If you choose to perform {lang-ident} by using the `lang_ident_model_1` that is provided in the cluster, no further steps are required to import or deploy the model. You can skip to using the model in -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[ingestion pipelines]. +<>. diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc index cde6a09341..50f7b25d66 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc @@ -64,4 +64,4 @@ recognized entities: // NOTCONSOLE If you are satisfied with the results, you can add these NLP tasks in your -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/inference[ingestion pipelines]. +<>. diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc b/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc index 1e213d13fd..3e7f3869dc 100644 --- a/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc +++ b/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc @@ -20,7 +20,7 @@ techniques. The {stack} {ml} features are structured around BERT and transformer models. These features support BERT’s tokenization scheme (called WordPiece) and transformer models that conform to the standard BERT model interface. For the current list of supported architectures, refer to -https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/model-reference[Compatible third party NLP models]. +<>. To incorporate transformer models and make predictions, {es} uses libtorch, which is an underlying native library for PyTorch. Trained models must be in a @@ -28,6 +28,6 @@ TorchScript representation for use with {stack} {ml} features. 
You can perform the following NLP operations: -* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/extract-info[Extract information] -* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/classify-text[Classify text] -* https://www.elastic.co/docs/current/serverless/elasticsearch/explore-your-data-ml-nlp/search-compare-text[Search and compare text] +* <> +* <> +* <> diff --git a/serverless/pages/project-settings/index-management.asciidoc b/serverless/pages/index-management.asciidoc similarity index 100% rename from serverless/pages/project-settings/index-management.asciidoc rename to serverless/pages/index-management.asciidoc diff --git a/serverless/pages/project-settings/index-management.mdx b/serverless/pages/index-management.mdx similarity index 100% rename from serverless/pages/project-settings/index-management.mdx rename to serverless/pages/index-management.mdx diff --git a/serverless/pages/project-settings/ingest-pipelines.asciidoc b/serverless/pages/ingest-pipelines.asciidoc similarity index 100% rename from serverless/pages/project-settings/ingest-pipelines.asciidoc rename to serverless/pages/ingest-pipelines.asciidoc diff --git a/serverless/pages/project-settings/ingest-pipelines.mdx b/serverless/pages/ingest-pipelines.mdx similarity index 100% rename from serverless/pages/project-settings/ingest-pipelines.mdx rename to serverless/pages/ingest-pipelines.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-api.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.asciidoc rename to serverless/pages/ingest-your-data-ingest-data-through-api.asciidoc diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.mdx b/serverless/pages/ingest-your-data-ingest-data-through-api.mdx similarity index 100% rename 
from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-api.mdx rename to serverless/pages/ingest-your-data-ingest-data-through-api.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.asciidoc rename to serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.mdx b/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.mdx similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-beats.mdx rename to serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc rename to serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx b/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.mdx similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-connector-client.mdx rename to serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc 
b/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc rename to serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc diff --git a/serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.mdx b/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.mdx similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-ingest-data-through-integrations-logstash.mdx rename to serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc b/serverless/pages/ingest-your-data-upload-file.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-upload-file.asciidoc rename to serverless/pages/ingest-your-data-upload-file.asciidoc diff --git a/serverless/pages/elasticsearch/ingest-your-data-upload-file.mdx b/serverless/pages/ingest-your-data-upload-file.mdx similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data-upload-file.mdx rename to serverless/pages/ingest-your-data-upload-file.mdx diff --git a/serverless/pages/elasticsearch/ingest-your-data.asciidoc b/serverless/pages/ingest-your-data.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data.asciidoc rename to serverless/pages/ingest-your-data.asciidoc diff --git a/serverless/pages/elasticsearch/ingest-your-data.mdx b/serverless/pages/ingest-your-data.mdx similarity index 100% rename from serverless/pages/elasticsearch/ingest-your-data.mdx rename to serverless/pages/ingest-your-data.mdx diff --git a/serverless/pages/project-settings/integrations.asciidoc b/serverless/pages/integrations.asciidoc similarity index 100% rename from 
serverless/pages/project-settings/integrations.asciidoc rename to serverless/pages/integrations.asciidoc diff --git a/serverless/pages/project-settings/integrations.mdx b/serverless/pages/integrations.mdx similarity index 100% rename from serverless/pages/project-settings/integrations.mdx rename to serverless/pages/integrations.mdx diff --git a/serverless/pages/elasticsearch/knn-search.asciidoc b/serverless/pages/knn-search.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/knn-search.asciidoc rename to serverless/pages/knn-search.asciidoc diff --git a/serverless/pages/elasticsearch/knn-search.mdx b/serverless/pages/knn-search.mdx similarity index 100% rename from serverless/pages/elasticsearch/knn-search.mdx rename to serverless/pages/knn-search.mdx diff --git a/serverless/pages/project-settings/logstash-pipelines.asciidoc b/serverless/pages/logstash-pipelines.asciidoc similarity index 100% rename from serverless/pages/project-settings/logstash-pipelines.asciidoc rename to serverless/pages/logstash-pipelines.asciidoc diff --git a/serverless/pages/project-settings/logstash-pipelines.mdx b/serverless/pages/logstash-pipelines.mdx similarity index 100% rename from serverless/pages/project-settings/logstash-pipelines.mdx rename to serverless/pages/logstash-pipelines.mdx diff --git a/serverless/pages/project-settings/machine-learning.asciidoc b/serverless/pages/machine-learning.asciidoc similarity index 100% rename from serverless/pages/project-settings/machine-learning.asciidoc rename to serverless/pages/machine-learning.asciidoc diff --git a/serverless/pages/project-settings/machine-learning.mdx b/serverless/pages/machine-learning.mdx similarity index 100% rename from serverless/pages/project-settings/machine-learning.mdx rename to serverless/pages/machine-learning.mdx diff --git a/serverless/pages/project-settings/maintenance-windows.asciidoc b/serverless/pages/maintenance-windows.asciidoc similarity index 100% rename from 
serverless/pages/project-settings/maintenance-windows.asciidoc rename to serverless/pages/maintenance-windows.asciidoc diff --git a/serverless/pages/project-settings/maintenance-windows.mdx b/serverless/pages/maintenance-windows.mdx similarity index 100% rename from serverless/pages/project-settings/maintenance-windows.mdx rename to serverless/pages/maintenance-windows.mdx diff --git a/serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc b/serverless/pages/manage-access-to-org-from-existing-account.asciidoc similarity index 100% rename from serverless/pages/general/manage-access-to-org-from-existing-account.asciidoc rename to serverless/pages/manage-access-to-org-from-existing-account.asciidoc diff --git a/serverless/pages/general/manage-access-to-org-from-existing-account.mdx b/serverless/pages/manage-access-to-org-from-existing-account.mdx similarity index 100% rename from serverless/pages/general/manage-access-to-org-from-existing-account.mdx rename to serverless/pages/manage-access-to-org-from-existing-account.mdx diff --git a/serverless/pages/general/manage-access-to-org-user-roles.asciidoc b/serverless/pages/manage-access-to-org-user-roles.asciidoc similarity index 100% rename from serverless/pages/general/manage-access-to-org-user-roles.asciidoc rename to serverless/pages/manage-access-to-org-user-roles.asciidoc diff --git a/serverless/pages/general/manage-access-to-org-user-roles.mdx b/serverless/pages/manage-access-to-org-user-roles.mdx similarity index 100% rename from serverless/pages/general/manage-access-to-org-user-roles.mdx rename to serverless/pages/manage-access-to-org-user-roles.mdx diff --git a/serverless/pages/general/manage-access-to-org.asciidoc b/serverless/pages/manage-access-to-org.asciidoc similarity index 100% rename from serverless/pages/general/manage-access-to-org.asciidoc rename to serverless/pages/manage-access-to-org.asciidoc diff --git a/serverless/pages/general/manage-access-to-org.mdx 
b/serverless/pages/manage-access-to-org.mdx similarity index 100% rename from serverless/pages/general/manage-access-to-org.mdx rename to serverless/pages/manage-access-to-org.mdx diff --git a/serverless/pages/general/manage-billing-check-subscription.asciidoc b/serverless/pages/manage-billing-check-subscription.asciidoc similarity index 100% rename from serverless/pages/general/manage-billing-check-subscription.asciidoc rename to serverless/pages/manage-billing-check-subscription.asciidoc diff --git a/serverless/pages/general/manage-billing-check-subscription.mdx b/serverless/pages/manage-billing-check-subscription.mdx similarity index 100% rename from serverless/pages/general/manage-billing-check-subscription.mdx rename to serverless/pages/manage-billing-check-subscription.mdx diff --git a/serverless/pages/general/manage-billing-history.asciidoc b/serverless/pages/manage-billing-history.asciidoc similarity index 100% rename from serverless/pages/general/manage-billing-history.asciidoc rename to serverless/pages/manage-billing-history.asciidoc diff --git a/serverless/pages/general/manage-billing-history.mdx b/serverless/pages/manage-billing-history.mdx similarity index 100% rename from serverless/pages/general/manage-billing-history.mdx rename to serverless/pages/manage-billing-history.mdx diff --git a/serverless/pages/general/manage-billing-monitor-usage.asciidoc b/serverless/pages/manage-billing-monitor-usage.asciidoc similarity index 100% rename from serverless/pages/general/manage-billing-monitor-usage.asciidoc rename to serverless/pages/manage-billing-monitor-usage.asciidoc diff --git a/serverless/pages/general/manage-billing-monitor-usage.mdx b/serverless/pages/manage-billing-monitor-usage.mdx similarity index 100% rename from serverless/pages/general/manage-billing-monitor-usage.mdx rename to serverless/pages/manage-billing-monitor-usage.mdx diff --git a/serverless/pages/general/manage-billing-pricing-model.asciidoc 
b/serverless/pages/manage-billing-pricing-model.asciidoc similarity index 100% rename from serverless/pages/general/manage-billing-pricing-model.asciidoc rename to serverless/pages/manage-billing-pricing-model.asciidoc diff --git a/serverless/pages/general/manage-billing-pricing-model.mdx b/serverless/pages/manage-billing-pricing-model.mdx similarity index 100% rename from serverless/pages/general/manage-billing-pricing-model.mdx rename to serverless/pages/manage-billing-pricing-model.mdx diff --git a/serverless/pages/general/manage-billing-stop-project.asciidoc b/serverless/pages/manage-billing-stop-project.asciidoc similarity index 100% rename from serverless/pages/general/manage-billing-stop-project.asciidoc rename to serverless/pages/manage-billing-stop-project.asciidoc diff --git a/serverless/pages/general/manage-billing-stop-project.mdx b/serverless/pages/manage-billing-stop-project.mdx similarity index 100% rename from serverless/pages/general/manage-billing-stop-project.mdx rename to serverless/pages/manage-billing-stop-project.mdx diff --git a/serverless/pages/general/manage-billing.asciidoc b/serverless/pages/manage-billing.asciidoc similarity index 100% rename from serverless/pages/general/manage-billing.asciidoc rename to serverless/pages/manage-billing.asciidoc diff --git a/serverless/pages/general/manage-billing.mdx b/serverless/pages/manage-billing.mdx similarity index 100% rename from serverless/pages/general/manage-billing.mdx rename to serverless/pages/manage-billing.mdx diff --git a/serverless/pages/general/manage-org.asciidoc b/serverless/pages/manage-org.asciidoc similarity index 100% rename from serverless/pages/general/manage-org.asciidoc rename to serverless/pages/manage-org.asciidoc diff --git a/serverless/pages/general/manage-org.mdx b/serverless/pages/manage-org.mdx similarity index 100% rename from serverless/pages/general/manage-org.mdx rename to serverless/pages/manage-org.mdx diff --git 
a/serverless/pages/general/manage-your-project-rest-api.asciidoc b/serverless/pages/manage-your-project-rest-api.asciidoc similarity index 100% rename from serverless/pages/general/manage-your-project-rest-api.asciidoc rename to serverless/pages/manage-your-project-rest-api.asciidoc diff --git a/serverless/pages/general/manage-your-project-rest-api.mdx b/serverless/pages/manage-your-project-rest-api.mdx similarity index 100% rename from serverless/pages/general/manage-your-project-rest-api.mdx rename to serverless/pages/manage-your-project-rest-api.mdx diff --git a/serverless/pages/general/manage-your-project.asciidoc b/serverless/pages/manage-your-project.asciidoc similarity index 100% rename from serverless/pages/general/manage-your-project.asciidoc rename to serverless/pages/manage-your-project.asciidoc diff --git a/serverless/pages/general/manage-your-project.mdx b/serverless/pages/manage-your-project.mdx similarity index 100% rename from serverless/pages/general/manage-your-project.mdx rename to serverless/pages/manage-your-project.mdx diff --git a/serverless/pages/project-settings/maps.asciidoc b/serverless/pages/maps.asciidoc similarity index 100% rename from serverless/pages/project-settings/maps.asciidoc rename to serverless/pages/maps.asciidoc diff --git a/serverless/pages/project-settings/maps.mdx b/serverless/pages/maps.mdx similarity index 100% rename from serverless/pages/project-settings/maps.mdx rename to serverless/pages/maps.mdx diff --git a/serverless/pages/elasticsearch/pricing.asciidoc b/serverless/pages/pricing.asciidoc similarity index 98% rename from serverless/pages/elasticsearch/pricing.asciidoc rename to serverless/pages/pricing.asciidoc index 45889c9ea8..f06b57165f 100644 --- a/serverless/pages/elasticsearch/pricing.asciidoc +++ b/serverless/pages/pricing.asciidoc @@ -15,7 +15,7 @@ and the number of searches and latency you require for searches. 
In addition, if you required {ml} for inference or NLP tasks, those VCUs are also metered and billed. -include::../../partials/minimum-vcus-detail.asciidoc[] +include::../partials/minimum-vcus-detail.asciidoc[] [discrete] [[elasticsearch-billing-information-about-the-vcu-types-search-ingest-and-ml]] diff --git a/serverless/pages/elasticsearch/pricing.mdx b/serverless/pages/pricing.mdx similarity index 100% rename from serverless/pages/elasticsearch/pricing.mdx rename to serverless/pages/pricing.mdx diff --git a/serverless/pages/devtools/profile-queries-and-aggregations.asciidoc b/serverless/pages/profile-queries-and-aggregations.asciidoc similarity index 100% rename from serverless/pages/devtools/profile-queries-and-aggregations.asciidoc rename to serverless/pages/profile-queries-and-aggregations.asciidoc diff --git a/serverless/pages/devtools/profile-queries-and-aggregations.mdx b/serverless/pages/profile-queries-and-aggregations.mdx similarity index 100% rename from serverless/pages/devtools/profile-queries-and-aggregations.mdx rename to serverless/pages/profile-queries-and-aggregations.mdx diff --git a/serverless/pages/project-settings/project-and-management-settings.asciidoc b/serverless/pages/project-and-management-settings.asciidoc similarity index 100% rename from serverless/pages/project-settings/project-and-management-settings.asciidoc rename to serverless/pages/project-and-management-settings.asciidoc diff --git a/serverless/pages/project-settings/project-and-management-settings.mdx b/serverless/pages/project-and-management-settings.mdx similarity index 100% rename from serverless/pages/project-settings/project-and-management-settings.mdx rename to serverless/pages/project-and-management-settings.mdx diff --git a/serverless/pages/project-settings/project-settings.asciidoc b/serverless/pages/project-settings.asciidoc similarity index 100% rename from serverless/pages/project-settings/project-settings.asciidoc rename to 
serverless/pages/project-settings.asciidoc diff --git a/serverless/pages/project-settings/project-settings.mdx b/serverless/pages/project-settings.mdx similarity index 100% rename from serverless/pages/project-settings/project-settings.mdx rename to serverless/pages/project-settings.mdx diff --git a/serverless/pages/project-settings/index.asciidoc b/serverless/pages/project-settings/index.asciidoc deleted file mode 100644 index c3890f0702..0000000000 --- a/serverless/pages/project-settings/index.asciidoc +++ /dev/null @@ -1,32 +0,0 @@ -:doctype: book - -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] - -[[project-and-management-settings]] -== Project and management settings - -include::./project-and-management-settings.asciidoc[leveloffset=+2] - -include::./project-settings.asciidoc[leveloffset=+2] -include::./api-keys.asciidoc[leveloffset=+3] -include::./action-connectors.asciidoc[leveloffset=+3] -include::./custom-roles.asciidoc[leveloffset=+3] -include::./data-views.asciidoc[leveloffset=+3] -include::./files.asciidoc[leveloffset=+3] -include::./index-management.asciidoc[leveloffset=+3] -include::./ingest-pipelines.asciidoc[leveloffset=+3] -include::./logstash-pipelines.asciidoc[leveloffset=+3] -include::./machine-learning.asciidoc[leveloffset=+3] -include::./maintenance-windows.asciidoc[leveloffset=+3] -include::./maps.asciidoc[leveloffset=+3] -include::./reports.asciidoc[leveloffset=+3] -include::./rules.asciidoc[leveloffset=+3] -include::./saved-objects.asciidoc[leveloffset=+3] -include::./spaces.asciidoc[leveloffset=+3] -include::./tags.asciidoc[leveloffset=+3] -include::./transforms.asciidoc[leveloffset=+3] - -include::./integrations.asciidoc[leveloffset=+2] - -include::./fleet-and-elastic-agent.asciidoc[leveloffset=+2] diff --git a/serverless/pages/project-settings/reports.asciidoc b/serverless/pages/reports.asciidoc similarity index 100% rename from 
serverless/pages/project-settings/reports.asciidoc rename to serverless/pages/reports.asciidoc diff --git a/serverless/pages/project-settings/reports.mdx b/serverless/pages/reports.mdx similarity index 100% rename from serverless/pages/project-settings/reports.mdx rename to serverless/pages/reports.mdx diff --git a/serverless/pages/project-settings/rules.asciidoc b/serverless/pages/rules.asciidoc similarity index 100% rename from serverless/pages/project-settings/rules.asciidoc rename to serverless/pages/rules.asciidoc diff --git a/serverless/pages/project-settings/rules.mdx b/serverless/pages/rules.mdx similarity index 100% rename from serverless/pages/project-settings/rules.mdx rename to serverless/pages/rules.mdx diff --git a/serverless/pages/devtools/run-api-requests-in-the-console.asciidoc b/serverless/pages/run-api-requests-in-the-console.asciidoc similarity index 100% rename from serverless/pages/devtools/run-api-requests-in-the-console.asciidoc rename to serverless/pages/run-api-requests-in-the-console.asciidoc diff --git a/serverless/pages/devtools/run-api-requests-in-the-console.mdx b/serverless/pages/run-api-requests-in-the-console.mdx similarity index 100% rename from serverless/pages/devtools/run-api-requests-in-the-console.mdx rename to serverless/pages/run-api-requests-in-the-console.mdx diff --git a/serverless/pages/project-settings/saved-objects.asciidoc b/serverless/pages/saved-objects.asciidoc similarity index 100% rename from serverless/pages/project-settings/saved-objects.asciidoc rename to serverless/pages/saved-objects.asciidoc diff --git a/serverless/pages/project-settings/saved-objects.mdx b/serverless/pages/saved-objects.mdx similarity index 100% rename from serverless/pages/project-settings/saved-objects.mdx rename to serverless/pages/saved-objects.mdx diff --git a/serverless/pages/elasticsearch/search-playground.asciidoc b/serverless/pages/search-playground.asciidoc similarity index 100% rename from 
serverless/pages/elasticsearch/search-playground.asciidoc rename to serverless/pages/search-playground.asciidoc diff --git a/serverless/pages/elasticsearch/search-playground.mdx b/serverless/pages/search-playground.mdx similarity index 100% rename from serverless/pages/elasticsearch/search-playground.mdx rename to serverless/pages/search-playground.mdx diff --git a/serverless/pages/elasticsearch/search-with-synonyms.asciidoc b/serverless/pages/search-with-synonyms.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/search-with-synonyms.asciidoc rename to serverless/pages/search-with-synonyms.asciidoc diff --git a/serverless/pages/elasticsearch/search-with-synonyms.mdx b/serverless/pages/search-with-synonyms.mdx similarity index 100% rename from serverless/pages/elasticsearch/search-with-synonyms.mdx rename to serverless/pages/search-with-synonyms.mdx diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc b/serverless/pages/search-your-data-semantic-search-elser.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/search-your-data-semantic-search-elser.asciidoc rename to serverless/pages/search-your-data-semantic-search-elser.asciidoc diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx b/serverless/pages/search-your-data-semantic-search-elser.mdx similarity index 100% rename from serverless/pages/elasticsearch/search-your-data-semantic-search-elser.mdx rename to serverless/pages/search-your-data-semantic-search-elser.mdx diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc b/serverless/pages/search-your-data-semantic-search.asciidoc similarity index 95% rename from serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc rename to serverless/pages/search-your-data-semantic-search.asciidoc index 271fc64081..c4a6192a6b 100644 --- a/serverless/pages/elasticsearch/search-your-data-semantic-search.asciidoc +++ 
b/serverless/pages/search-your-data-semantic-search.asciidoc @@ -68,7 +68,7 @@ important each is. After you decide which model you want to use for implementing semantic search, you need to deploy the model in {es}. -include::../../partials/deploy-nlp-model-widget.asciidoc[] +include::../partials/deploy-nlp-model-widget.asciidoc[] [discrete] [[semantic-search-field-mappings]] @@ -78,7 +78,7 @@ Before you start using the deployed model to generate embeddings based on your input text, you need to prepare your index mapping first. The mapping of the index depends on the type of model. -include::../../partials/field-mappings-widget.asciidoc[] +include::../partials/field-mappings-widget.asciidoc[] [discrete] [[semantic-search-generate-embeddings]] @@ -93,7 +93,7 @@ infer against the data ingested through the pipeline. After you created the ingest pipeline with the inference processor, you can ingest your data through it to generate the model output. -include::../../partials/generate-embeddings-widget.asciidoc[] +include::../partials/generate-embeddings-widget.asciidoc[] Now it is time to perform semantic search! @@ -104,7 +104,7 @@ Now it is time to perform semantic search! Depending on the type of model you have deployed, you can query sparse vectors with a sparse vector query, or dense vectors with a kNN search. -include::../../partials/search-widget.asciidoc[] +include::../partials/search-widget.asciidoc[] [discrete] [[semantic-search-hybrid-search]] @@ -118,7 +118,7 @@ Combining semantic and lexical search into one hybrid search request using but hybrid search using reciprocal rank fusion {blog-ref}improving-information-retrieval-elastic-stack-hybrid[has been shown to perform better in general]. 
-include::../../partials/hybrid-search-widget.asciidoc[] +include::../partials/hybrid-search-widget.asciidoc[] [discrete] [[semantic-search-read-more]] diff --git a/serverless/pages/elasticsearch/search-your-data-semantic-search.mdx b/serverless/pages/search-your-data-semantic-search.mdx similarity index 100% rename from serverless/pages/elasticsearch/search-your-data-semantic-search.mdx rename to serverless/pages/search-your-data-semantic-search.mdx diff --git a/serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc b/serverless/pages/search-your-data-the-search-api.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/search-your-data-the-search-api.asciidoc rename to serverless/pages/search-your-data-the-search-api.asciidoc diff --git a/serverless/pages/elasticsearch/search-your-data-the-search-api.mdx b/serverless/pages/search-your-data-the-search-api.mdx similarity index 100% rename from serverless/pages/elasticsearch/search-your-data-the-search-api.mdx rename to serverless/pages/search-your-data-the-search-api.mdx diff --git a/serverless/pages/elasticsearch/search-your-data.asciidoc b/serverless/pages/search-your-data.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/search-your-data.asciidoc rename to serverless/pages/search-your-data.asciidoc diff --git a/serverless/pages/elasticsearch/search-your-data.mdx b/serverless/pages/search-your-data.mdx similarity index 100% rename from serverless/pages/elasticsearch/search-your-data.mdx rename to serverless/pages/search-your-data.mdx diff --git a/serverless/pages/elasticsearch/serverless-differences.asciidoc b/serverless/pages/serverless-differences.asciidoc similarity index 100% rename from serverless/pages/elasticsearch/serverless-differences.asciidoc rename to serverless/pages/serverless-differences.asciidoc diff --git a/serverless/pages/elasticsearch/serverless-differences.mdx b/serverless/pages/serverless-differences.mdx similarity index 100% rename 
from serverless/pages/elasticsearch/serverless-differences.mdx rename to serverless/pages/serverless-differences.mdx diff --git a/serverless/pages/general/service-status.asciidoc b/serverless/pages/service-status.asciidoc similarity index 100% rename from serverless/pages/general/service-status.asciidoc rename to serverless/pages/service-status.asciidoc diff --git a/serverless/pages/general/service-status.mdx b/serverless/pages/service-status.mdx similarity index 100% rename from serverless/pages/general/service-status.mdx rename to serverless/pages/service-status.mdx diff --git a/serverless/pages/general/sign-up.asciidoc b/serverless/pages/sign-up.asciidoc similarity index 100% rename from serverless/pages/general/sign-up.asciidoc rename to serverless/pages/sign-up.asciidoc diff --git a/serverless/pages/general/sign-up.mdx b/serverless/pages/sign-up.mdx similarity index 100% rename from serverless/pages/general/sign-up.mdx rename to serverless/pages/sign-up.mdx diff --git a/serverless/pages/project-settings/spaces.asciidoc b/serverless/pages/spaces.asciidoc similarity index 100% rename from serverless/pages/project-settings/spaces.asciidoc rename to serverless/pages/spaces.asciidoc diff --git a/serverless/pages/project-settings/spaces.mdx b/serverless/pages/spaces.mdx similarity index 100% rename from serverless/pages/project-settings/spaces.mdx rename to serverless/pages/spaces.mdx diff --git a/serverless/pages/project-settings/tags.asciidoc b/serverless/pages/tags.asciidoc similarity index 100% rename from serverless/pages/project-settings/tags.asciidoc rename to serverless/pages/tags.asciidoc diff --git a/serverless/pages/project-settings/tags.mdx b/serverless/pages/tags.mdx similarity index 100% rename from serverless/pages/project-settings/tags.mdx rename to serverless/pages/tags.mdx diff --git a/serverless/pages/elasticsearch/technical-preview-limitations.asciidoc b/serverless/pages/technical-preview-limitations.asciidoc similarity index 100% rename from 
serverless/pages/elasticsearch/technical-preview-limitations.asciidoc rename to serverless/pages/technical-preview-limitations.asciidoc diff --git a/serverless/pages/elasticsearch/technical-preview-limitations.mdx b/serverless/pages/technical-preview-limitations.mdx similarity index 100% rename from serverless/pages/elasticsearch/technical-preview-limitations.mdx rename to serverless/pages/technical-preview-limitations.mdx diff --git a/serverless/pages/project-settings/transforms.asciidoc b/serverless/pages/transforms.asciidoc similarity index 100% rename from serverless/pages/project-settings/transforms.asciidoc rename to serverless/pages/transforms.asciidoc diff --git a/serverless/pages/project-settings/transforms.mdx b/serverless/pages/transforms.mdx similarity index 100% rename from serverless/pages/project-settings/transforms.mdx rename to serverless/pages/transforms.mdx diff --git a/serverless/pages/general/user-profile.asciidoc b/serverless/pages/user-profile.asciidoc similarity index 100% rename from serverless/pages/general/user-profile.asciidoc rename to serverless/pages/user-profile.asciidoc diff --git a/serverless/pages/general/user-profile.mdx b/serverless/pages/user-profile.mdx similarity index 100% rename from serverless/pages/general/user-profile.mdx rename to serverless/pages/user-profile.mdx diff --git a/serverless/pages/general/visualize-library.asciidoc b/serverless/pages/visualize-library.asciidoc similarity index 100% rename from serverless/pages/general/visualize-library.asciidoc rename to serverless/pages/visualize-library.asciidoc diff --git a/serverless/pages/general/visualize-library.mdx b/serverless/pages/visualize-library.mdx similarity index 100% rename from serverless/pages/general/visualize-library.mdx rename to serverless/pages/visualize-library.mdx diff --git a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc b/serverless/pages/what-is-elasticsearch-serverless.asciidoc similarity index 100% rename from 
serverless/pages/elasticsearch/what-is-elasticsearch-serverless.asciidoc rename to serverless/pages/what-is-elasticsearch-serverless.asciidoc diff --git a/serverless/pages/elasticsearch/what-is-elasticsearch-serverless.mdx b/serverless/pages/what-is-elasticsearch-serverless.mdx similarity index 100% rename from serverless/pages/elasticsearch/what-is-elasticsearch-serverless.mdx rename to serverless/pages/what-is-elasticsearch-serverless.mdx diff --git a/serverless/pages/general/what-is-serverless.asciidoc b/serverless/pages/what-is-serverless.asciidoc similarity index 100% rename from serverless/pages/general/what-is-serverless.asciidoc rename to serverless/pages/what-is-serverless.asciidoc diff --git a/serverless/pages/general/what-is-serverless.mdx b/serverless/pages/what-is-serverless.mdx similarity index 100% rename from serverless/pages/general/what-is-serverless.mdx rename to serverless/pages/what-is-serverless.mdx diff --git a/serverless/partials/field-mappings-elser.asciidoc b/serverless/partials/field-mappings-elser.asciidoc index 25f7aa5cf6..e633b80b7c 100644 --- a/serverless/partials/field-mappings-elser.asciidoc +++ b/serverless/partials/field-mappings-elser.asciidoc @@ -3,7 +3,7 @@ The {es} {ref}/sparse-vector.html[`sparse_vector`] field type can store these token-weight pairs as numeric feature vectors. The index must have a field with the `sparse_vector` field type to index the tokens that ELSER generates. -To create a mapping for your ELSER index, refer to the https://www.elastic.co/docs/current/serverless/elasticsearch/elasticsearch/reference/semantic-search-elser[Create the index mapping section] +To create a mapping for your ELSER index, refer to the <> of the tutorial. 
The example shows how to create an index mapping for `my-index` that defines the `my_embeddings.tokens` field - which will contain the ELSER output - as a diff --git a/serverless/partials/generate-embeddings-elser.asciidoc b/serverless/partials/generate-embeddings-elser.asciidoc index 3badc18360..2cb2c2a88c 100644 --- a/serverless/partials/generate-embeddings-elser.asciidoc +++ b/serverless/partials/generate-embeddings-elser.asciidoc @@ -26,6 +26,6 @@ curl -X PUT "${ES_URL}/_ingest/pipeline/my-text-embeddings-pipeline" \ ---- To ingest data through the pipeline to generate tokens with ELSER, refer to the -https://www.elastic.co/docs/current/serverless/elasticsearch/elasticsearch/reference/semantic-search-elser[Ingest the data through the {infer} ingest pipeline] section of the tutorial. After you successfully +<> section of the tutorial. After you successfully ingested documents by using the pipeline, your index will contain the tokens generated by ELSER. diff --git a/serverless/partials/search-dense-vector.asciidoc b/serverless/partials/search-dense-vector.asciidoc index 897af68487..ee13517be3 100644 --- a/serverless/partials/search-dense-vector.asciidoc +++ b/serverless/partials/search-dense-vector.asciidoc @@ -1,5 +1,5 @@ Text embeddings produced by dense vector models can be queried using a -https://www.elastic.co/docs/current/serverless/elasticsearch/knn-search[kNN search]. +<>. In the `knn` clause, provide the name of the dense vector field, and a `query_vector_builder` clause with the model ID and the query text. 
From ae55bf1b293204b66d727202ccde4bc55e9177fc Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 15:16:23 -0600 Subject: [PATCH 22/25] move non-published files back to pages directory --- .../{hidden => }/explore-your-data-ml-nlp-classify-text.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-classify-text.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-deploy-model.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-deploy-model.mdx | 0 .../explore-your-data-ml-nlp-deploy-trained-models.asciidoc | 0 .../explore-your-data-ml-nlp-deploy-trained-models.mdx | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-elser.asciidoc | 0 serverless/pages/{hidden => }/explore-your-data-ml-nlp-elser.mdx | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-examples.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-examples.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-extract-info.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-extract-info.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-import-model.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-import-model.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-inference.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-inference.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-lang-ident.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-lang-ident.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-model-ref.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-model-ref.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-ner-example.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-ner-example.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-ootb-models.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-ootb-models.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-search-compare.asciidoc | 0 .../{hidden => }/explore-your-data-ml-nlp-search-compare.mdx | 0 .../{hidden => 
}/explore-your-data-ml-nlp-select-model.asciidoc | 0 .../pages/{hidden => }/explore-your-data-ml-nlp-select-model.mdx | 0 .../{hidden => }/explore-your-data-ml-nlp-test-inference.asciidoc | 0 .../{hidden => }/explore-your-data-ml-nlp-test-inference.mdx | 0 .../explore-your-data-ml-nlp-text-embedding-example.asciidoc | 0 .../explore-your-data-ml-nlp-text-embedding-example.mdx | 0 serverless/pages/{hidden => }/explore-your-data-ml-nlp.asciidoc | 0 serverless/pages/{hidden => }/explore-your-data-ml-nlp.mdx | 0 34 files changed, 0 insertions(+), 0 deletions(-) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-classify-text.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-classify-text.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-deploy-model.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-deploy-model.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-deploy-trained-models.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-deploy-trained-models.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-elser.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-elser.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-examples.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-examples.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-extract-info.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-extract-info.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-import-model.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-import-model.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-inference.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-inference.mdx (100%) rename 
serverless/pages/{hidden => }/explore-your-data-ml-nlp-lang-ident.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-lang-ident.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-model-ref.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-model-ref.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-ner-example.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-ner-example.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-ootb-models.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-ootb-models.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-search-compare.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-search-compare.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-select-model.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-select-model.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-test-inference.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-test-inference.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-text-embedding-example.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp-text-embedding-example.mdx (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp.asciidoc (100%) rename serverless/pages/{hidden => }/explore-your-data-ml-nlp.mdx (100%) diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.asciidoc b/serverless/pages/explore-your-data-ml-nlp-classify-text.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-classify-text.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.mdx 
b/serverless/pages/explore-your-data-ml-nlp-classify-text.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-classify-text.mdx rename to serverless/pages/explore-your-data-ml-nlp-classify-text.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.asciidoc b/serverless/pages/explore-your-data-ml-nlp-deploy-model.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-deploy-model.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.mdx b/serverless/pages/explore-your-data-ml-nlp-deploy-model.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-deploy-model.mdx rename to serverless/pages/explore-your-data-ml-nlp-deploy-model.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc b/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.mdx b/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-deploy-trained-models.mdx rename to serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-elser.asciidoc b/serverless/pages/explore-your-data-ml-nlp-elser.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-elser.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-elser.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-elser.mdx 
b/serverless/pages/explore-your-data-ml-nlp-elser.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-elser.mdx rename to serverless/pages/explore-your-data-ml-nlp-elser.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc b/serverless/pages/explore-your-data-ml-nlp-examples.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-examples.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-examples.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-examples.mdx b/serverless/pages/explore-your-data-ml-nlp-examples.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-examples.mdx rename to serverless/pages/explore-your-data-ml-nlp-examples.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.asciidoc b/serverless/pages/explore-your-data-ml-nlp-extract-info.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-extract-info.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.mdx b/serverless/pages/explore-your-data-ml-nlp-extract-info.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-extract-info.mdx rename to serverless/pages/explore-your-data-ml-nlp-extract-info.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-import-model.asciidoc b/serverless/pages/explore-your-data-ml-nlp-import-model.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-import-model.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-import-model.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-import-model.mdx b/serverless/pages/explore-your-data-ml-nlp-import-model.mdx similarity index 100% rename from 
serverless/pages/hidden/explore-your-data-ml-nlp-import-model.mdx rename to serverless/pages/explore-your-data-ml-nlp-import-model.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-inference.asciidoc b/serverless/pages/explore-your-data-ml-nlp-inference.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-inference.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-inference.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-inference.mdx b/serverless/pages/explore-your-data-ml-nlp-inference.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-inference.mdx rename to serverless/pages/explore-your-data-ml-nlp-inference.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc b/serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.mdx b/serverless/pages/explore-your-data-ml-nlp-lang-ident.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-lang-ident.mdx rename to serverless/pages/explore-your-data-ml-nlp-lang-ident.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc b/serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.mdx b/serverless/pages/explore-your-data-ml-nlp-model-ref.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-model-ref.mdx rename to serverless/pages/explore-your-data-ml-nlp-model-ref.mdx diff --git 
a/serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.asciidoc b/serverless/pages/explore-your-data-ml-nlp-ner-example.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-ner-example.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.mdx b/serverless/pages/explore-your-data-ml-nlp-ner-example.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-ner-example.mdx rename to serverless/pages/explore-your-data-ml-nlp-ner-example.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc b/serverless/pages/explore-your-data-ml-nlp-ootb-models.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-ootb-models.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.mdx b/serverless/pages/explore-your-data-ml-nlp-ootb-models.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-ootb-models.mdx rename to serverless/pages/explore-your-data-ml-nlp-ootb-models.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.asciidoc b/serverless/pages/explore-your-data-ml-nlp-search-compare.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-search-compare.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.mdx b/serverless/pages/explore-your-data-ml-nlp-search-compare.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-search-compare.mdx rename to serverless/pages/explore-your-data-ml-nlp-search-compare.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc 
b/serverless/pages/explore-your-data-ml-nlp-select-model.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-select-model.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-select-model.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-select-model.mdx b/serverless/pages/explore-your-data-ml-nlp-select-model.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-select-model.mdx rename to serverless/pages/explore-your-data-ml-nlp-select-model.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc b/serverless/pages/explore-your-data-ml-nlp-test-inference.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-test-inference.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.mdx b/serverless/pages/explore-your-data-ml-nlp-test-inference.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-test-inference.mdx rename to serverless/pages/explore-your-data-ml-nlp-test-inference.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.asciidoc b/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.asciidoc rename to serverless/pages/explore-your-data-ml-nlp-text-embedding-example.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.mdx b/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp-text-embedding-example.mdx rename to serverless/pages/explore-your-data-ml-nlp-text-embedding-example.mdx diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc 
b/serverless/pages/explore-your-data-ml-nlp.asciidoc similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp.asciidoc rename to serverless/pages/explore-your-data-ml-nlp.asciidoc diff --git a/serverless/pages/hidden/explore-your-data-ml-nlp.mdx b/serverless/pages/explore-your-data-ml-nlp.mdx similarity index 100% rename from serverless/pages/hidden/explore-your-data-ml-nlp.mdx rename to serverless/pages/explore-your-data-ml-nlp.mdx From 19d1f435bbc52967a2b0a67674a8bc4707bb50cc Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 17:08:47 -0600 Subject: [PATCH 23/25] use asciidoc-dir --- serverless/index-serverless-devtools.asciidoc | 4 ++-- serverless/index-serverless-elasticsearch.asciidoc | 4 ++-- serverless/index-serverless-general.asciidoc | 4 ++-- serverless/index-serverless-project-settings.asciidoc | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/serverless/index-serverless-devtools.asciidoc b/serverless/index-serverless-devtools.asciidoc index f5aa095894..d4c2324b30 100644 --- a/serverless/index-serverless-devtools.asciidoc +++ b/serverless/index-serverless-devtools.asciidoc @@ -1,7 +1,7 @@ :doctype: book -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] +include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] +include::{asciidoc-dir}/../../shared/attributes.asciidoc[] [[developer-tools]] == Dev tools diff --git a/serverless/index-serverless-elasticsearch.asciidoc b/serverless/index-serverless-elasticsearch.asciidoc index bcf3a7cc8e..17c141ec84 100644 --- a/serverless/index-serverless-elasticsearch.asciidoc +++ b/serverless/index-serverless-elasticsearch.asciidoc @@ -1,7 +1,7 @@ :doctype: book -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] +include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] 
+include::{asciidoc-dir}/../../shared/attributes.asciidoc[] [[what-is-elasticsearch-serverless]] == Elasticsearch diff --git a/serverless/index-serverless-general.asciidoc b/serverless/index-serverless-general.asciidoc index 6faf611ec3..08dbf303a9 100644 --- a/serverless/index-serverless-general.asciidoc +++ b/serverless/index-serverless-general.asciidoc @@ -1,7 +1,7 @@ :doctype: book -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] +include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] +include::{asciidoc-dir}/../../shared/attributes.asciidoc[] [[intro]] == Welcome to Elastic serverless diff --git a/serverless/index-serverless-project-settings.asciidoc b/serverless/index-serverless-project-settings.asciidoc index 10caa90b9e..96107f797e 100644 --- a/serverless/index-serverless-project-settings.asciidoc +++ b/serverless/index-serverless-project-settings.asciidoc @@ -1,7 +1,7 @@ :doctype: book -include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -include::{docs-root}/shared/attributes.asciidoc[] +include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] +include::{asciidoc-dir}/../../shared/attributes.asciidoc[] [[project-and-management-settings]] == Project and management settings From 77969a9992fccd3133bb3953f7f21236831cd1c4 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 17:48:03 -0600 Subject: [PATCH 24/25] update id to fix redirect --- serverless/index-serverless-devtools.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/serverless/index-serverless-devtools.asciidoc b/serverless/index-serverless-devtools.asciidoc index d4c2324b30..362cb54a51 100644 --- a/serverless/index-serverless-devtools.asciidoc +++ b/serverless/index-serverless-devtools.asciidoc @@ -3,7 +3,7 @@ include::{asciidoc-dir}/../../shared/versions/stack/master.asciidoc[] include::{asciidoc-dir}/../../shared/attributes.asciidoc[] 
-[[developer-tools]] +[[devtools-developer-tools]] == Dev tools include::./pages/general-developer-tools.asciidoc[leveloffset=+2] From 531df89fd94edf832f1ae288369324160cec2941 Mon Sep 17 00:00:00 2001 From: Colleen McGinnis Date: Mon, 4 Nov 2024 17:50:48 -0600 Subject: [PATCH 25/25] comment out description and keywords --- serverless/pages/action-connectors.asciidoc | 4 ++-- serverless/pages/api-keys.asciidoc | 4 ++-- serverless/pages/apis-elasticsearch-conventions.asciidoc | 4 ++-- serverless/pages/apis-http-apis.asciidoc | 4 ++-- serverless/pages/apis-kibana-conventions.asciidoc | 4 ++-- serverless/pages/clients-dot-net-getting-started.asciidoc | 4 ++-- serverless/pages/clients-go-getting-started.asciidoc | 4 ++-- serverless/pages/clients-java-getting-started.asciidoc | 4 ++-- serverless/pages/clients-nodejs-getting-started.asciidoc | 4 ++-- serverless/pages/clients-php-getting-started.asciidoc | 4 ++-- serverless/pages/clients-python-getting-started.asciidoc | 4 ++-- serverless/pages/clients-ruby-getting-started.asciidoc | 4 ++-- serverless/pages/clients.asciidoc | 4 ++-- serverless/pages/cloud-regions.asciidoc | 4 ++-- serverless/pages/custom-roles.asciidoc | 4 ++-- serverless/pages/data-views.asciidoc | 4 ++-- serverless/pages/debug-grok-expressions.asciidoc | 4 ++-- serverless/pages/debug-painless-scripts.asciidoc | 4 ++-- serverless/pages/developer-tools-troubleshooting.asciidoc | 4 ++-- serverless/pages/elasticsearch-developer-tools.asciidoc | 4 ++-- serverless/pages/explore-your-data-alerting.asciidoc | 4 ++-- .../pages/explore-your-data-discover-your-data.asciidoc | 4 ++-- .../pages/explore-your-data-ml-nlp-classify-text.asciidoc | 4 ++-- .../pages/explore-your-data-ml-nlp-deploy-model.asciidoc | 2 +- .../explore-your-data-ml-nlp-deploy-trained-models.asciidoc | 4 ++-- serverless/pages/explore-your-data-ml-nlp-elser.asciidoc | 4 ++-- serverless/pages/explore-your-data-ml-nlp-examples.asciidoc | 2 +- .../pages/explore-your-data-ml-nlp-extract-info.asciidoc 
| 4 ++-- .../pages/explore-your-data-ml-nlp-import-model.asciidoc | 2 +- serverless/pages/explore-your-data-ml-nlp-inference.asciidoc | 4 ++-- serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc | 4 ++-- serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc | 4 ++-- .../pages/explore-your-data-ml-nlp-ner-example.asciidoc | 2 +- .../pages/explore-your-data-ml-nlp-ootb-models.asciidoc | 4 ++-- .../pages/explore-your-data-ml-nlp-search-compare.asciidoc | 4 ++-- .../pages/explore-your-data-ml-nlp-select-model.asciidoc | 2 +- .../pages/explore-your-data-ml-nlp-test-inference.asciidoc | 4 ++-- .../explore-your-data-ml-nlp-text-embedding-example.asciidoc | 2 +- serverless/pages/explore-your-data-ml-nlp.asciidoc | 2 +- .../pages/explore-your-data-the-aggregations-api.asciidoc | 4 ++-- ...e-your-data-visualize-your-data-create-dashboards.asciidoc | 4 ++-- ...ur-data-visualize-your-data-create-visualizations.asciidoc | 4 ++-- .../pages/explore-your-data-visualize-your-data.asciidoc | 4 ++-- serverless/pages/explore-your-data.asciidoc | 4 ++-- serverless/pages/files.asciidoc | 4 ++-- serverless/pages/fleet-and-elastic-agent.asciidoc | 4 ++-- serverless/pages/general-developer-tools.asciidoc | 4 ++-- serverless/pages/get-started.asciidoc | 4 ++-- serverless/pages/index-management.asciidoc | 4 ++-- serverless/pages/ingest-pipelines.asciidoc | 4 ++-- .../pages/ingest-your-data-ingest-data-through-api.asciidoc | 4 ++-- ...-your-data-ingest-data-through-integrations-beats.asciidoc | 4 ++-- ...ingest-data-through-integrations-connector-client.asciidoc | 4 ++-- ...ur-data-ingest-data-through-integrations-logstash.asciidoc | 4 ++-- serverless/pages/ingest-your-data-upload-file.asciidoc | 4 ++-- serverless/pages/ingest-your-data.asciidoc | 4 ++-- serverless/pages/integrations.asciidoc | 4 ++-- serverless/pages/knn-search.asciidoc | 4 ++-- serverless/pages/logstash-pipelines.asciidoc | 4 ++-- serverless/pages/machine-learning.asciidoc | 4 ++-- 
serverless/pages/maintenance-windows.asciidoc | 4 ++-- .../pages/manage-access-to-org-from-existing-account.asciidoc | 4 ++-- serverless/pages/manage-access-to-org-user-roles.asciidoc | 4 ++-- serverless/pages/manage-access-to-org.asciidoc | 4 ++-- serverless/pages/manage-billing-check-subscription.asciidoc | 4 ++-- serverless/pages/manage-billing-history.asciidoc | 4 ++-- serverless/pages/manage-billing-monitor-usage.asciidoc | 4 ++-- serverless/pages/manage-billing-pricing-model.asciidoc | 4 ++-- serverless/pages/manage-billing-stop-project.asciidoc | 4 ++-- serverless/pages/manage-billing.asciidoc | 4 ++-- serverless/pages/manage-org.asciidoc | 4 ++-- serverless/pages/manage-your-project-rest-api.asciidoc | 4 ++-- serverless/pages/manage-your-project.asciidoc | 4 ++-- serverless/pages/maps.asciidoc | 4 ++-- serverless/pages/pricing.asciidoc | 4 ++-- serverless/pages/profile-queries-and-aggregations.asciidoc | 4 ++-- serverless/pages/project-and-management-settings.asciidoc | 4 ++-- serverless/pages/project-settings.asciidoc | 4 ++-- serverless/pages/reports.asciidoc | 4 ++-- serverless/pages/rules.asciidoc | 4 ++-- serverless/pages/run-api-requests-in-the-console.asciidoc | 4 ++-- serverless/pages/saved-objects.asciidoc | 4 ++-- serverless/pages/search-playground.asciidoc | 4 ++-- serverless/pages/search-with-synonyms.asciidoc | 4 ++-- .../pages/search-your-data-semantic-search-elser.asciidoc | 4 ++-- serverless/pages/search-your-data-semantic-search.asciidoc | 4 ++-- serverless/pages/search-your-data-the-search-api.asciidoc | 4 ++-- serverless/pages/search-your-data.asciidoc | 4 ++-- serverless/pages/serverless-differences.asciidoc | 4 ++-- serverless/pages/service-status.asciidoc | 2 +- serverless/pages/sign-up.asciidoc | 4 ++-- serverless/pages/spaces.asciidoc | 2 +- serverless/pages/tags.asciidoc | 4 ++-- serverless/pages/technical-preview-limitations.asciidoc | 4 ++-- serverless/pages/transforms.asciidoc | 4 ++-- serverless/pages/user-profile.asciidoc | 4 
++-- serverless/pages/visualize-library.asciidoc | 2 +- serverless/pages/what-is-elasticsearch-serverless.asciidoc | 4 ++-- serverless/pages/what-is-serverless.asciidoc | 2 +- 99 files changed, 187 insertions(+), 187 deletions(-) diff --git a/serverless/pages/action-connectors.asciidoc b/serverless/pages/action-connectors.asciidoc index 2c18c1955e..4c8d8d8a7b 100644 --- a/serverless/pages/action-connectors.asciidoc +++ b/serverless/pages/action-connectors.asciidoc @@ -1,8 +1,8 @@ [[action-connectors]] = {connectors-app} -:description: Configure connections to third party systems for use in cases and rules. -:keywords: serverless +// :description: Configure connections to third party systems for use in cases and rules. +// :keywords: serverless preview:[] diff --git a/serverless/pages/api-keys.asciidoc b/serverless/pages/api-keys.asciidoc index f181ffc222..794f8c165f 100644 --- a/serverless/pages/api-keys.asciidoc +++ b/serverless/pages/api-keys.asciidoc @@ -1,8 +1,8 @@ [[api-keys]] = {api-keys-app} -:description: API keys allow access to the {stack} on behalf of a user. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: API keys allow access to the {stack} on behalf of a user. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/apis-elasticsearch-conventions.asciidoc b/serverless/pages/apis-elasticsearch-conventions.asciidoc index ec204ca246..91287b5412 100644 --- a/serverless/pages/apis-elasticsearch-conventions.asciidoc +++ b/serverless/pages/apis-elasticsearch-conventions.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-api-conventions]] = Elasticsearch API conventions -:description: The {es} REST APIs have conventions for headers and request bodies. -:keywords: serverless, elasticsearch, API, reference +// :description: The {es} REST APIs have conventions for headers and request bodies. 
+// :keywords: serverless, elasticsearch, API, reference preview:[] diff --git a/serverless/pages/apis-http-apis.asciidoc b/serverless/pages/apis-http-apis.asciidoc index 5d7feb08e7..d60e8eda58 100644 --- a/serverless/pages/apis-http-apis.asciidoc +++ b/serverless/pages/apis-http-apis.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-http-apis]] = REST APIs -:description: {es} and {kib} expose REST APIs that can be called directly to configure and access {stack} features. -:keywords: serverless, elasticsearch, http, rest, overview +// :description: {es} and {kib} expose REST APIs that can be called directly to configure and access {stack} features. +// :keywords: serverless, elasticsearch, http, rest, overview preview:[] diff --git a/serverless/pages/apis-kibana-conventions.asciidoc b/serverless/pages/apis-kibana-conventions.asciidoc index a07af9335b..5e47fbb50f 100644 --- a/serverless/pages/apis-kibana-conventions.asciidoc +++ b/serverless/pages/apis-kibana-conventions.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-kibana-api-conventions]] = Management API conventions -:description: The Management APIs for {serverless-short} have request header conventions. -:keywords: serverless, kibana, API, reference +// :description: The Management APIs for {serverless-short} have request header conventions. +// :keywords: serverless, kibana, API, reference preview:[] diff --git a/serverless/pages/clients-dot-net-getting-started.asciidoc b/serverless/pages/clients-dot-net-getting-started.asciidoc index 9a9bf61c33..18f021625b 100644 --- a/serverless/pages/clients-dot-net-getting-started.asciidoc +++ b/serverless/pages/clients-dot-net-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-dot-net-client-getting-started]] = Get started with the serverless .NET client -:description: Set up and use the .NET client for {es3}. -:keywords: serverless, elasticsearch, .net, how to +// :description: Set up and use the .NET client for {es3}. 
+// :keywords: serverless, elasticsearch, .net, how to preview:[] diff --git a/serverless/pages/clients-go-getting-started.asciidoc b/serverless/pages/clients-go-getting-started.asciidoc index ac807bcad9..b3612da109 100644 --- a/serverless/pages/clients-go-getting-started.asciidoc +++ b/serverless/pages/clients-go-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-go-client-getting-started]] = Get started with the serverless Go Client -:description: Set up and use the Go client for {es3}. -:keywords: serverless, elasticsearch, go, how to +// :description: Set up and use the Go client for {es3}. +// :keywords: serverless, elasticsearch, go, how to preview:[] diff --git a/serverless/pages/clients-java-getting-started.asciidoc b/serverless/pages/clients-java-getting-started.asciidoc index 317644ed25..81ad5abc98 100644 --- a/serverless/pages/clients-java-getting-started.asciidoc +++ b/serverless/pages/clients-java-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-java-client-getting-started]] = Get started with the serverless Java client -:description: Set up and use the Java client for {es3}. -:keywords: serverless, elasticsearch, java, how to +// :description: Set up and use the Java client for {es3}. +// :keywords: serverless, elasticsearch, java, how to preview:[] diff --git a/serverless/pages/clients-nodejs-getting-started.asciidoc b/serverless/pages/clients-nodejs-getting-started.asciidoc index cdf3b6234b..61edade93c 100644 --- a/serverless/pages/clients-nodejs-getting-started.asciidoc +++ b/serverless/pages/clients-nodejs-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-nodejs-client-getting-started]] = Get started with the serverless Node.js client -:description: Set up and use the Node.js client for {es3}. -:keywords: serverless, elasticsearch, nodejs, how to +// :description: Set up and use the Node.js client for {es3}. 
+// :keywords: serverless, elasticsearch, nodejs, how to preview:[] diff --git a/serverless/pages/clients-php-getting-started.asciidoc b/serverless/pages/clients-php-getting-started.asciidoc index 6fd5e0ea15..2821f30ad6 100644 --- a/serverless/pages/clients-php-getting-started.asciidoc +++ b/serverless/pages/clients-php-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-php-client-getting-started]] = Get started with the serverless PHP client -:description: Set up and use the PHP client for {es3}. -:keywords: serverless, elasticsearch, php, how to +// :description: Set up and use the PHP client for {es3}. +// :keywords: serverless, elasticsearch, php, how to preview:[] diff --git a/serverless/pages/clients-python-getting-started.asciidoc b/serverless/pages/clients-python-getting-started.asciidoc index ed26a37ac6..8799db8f13 100644 --- a/serverless/pages/clients-python-getting-started.asciidoc +++ b/serverless/pages/clients-python-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-python-client-getting-started]] = Get started with the serverless Python client -:description: Set up and use the Python client for {es3}. -:keywords: serverless, elasticsearch, python, how to +// :description: Set up and use the Python client for {es3}. +// :keywords: serverless, elasticsearch, python, how to preview:[] diff --git a/serverless/pages/clients-ruby-getting-started.asciidoc b/serverless/pages/clients-ruby-getting-started.asciidoc index 8fccb0ad2d..c35f38f8e6 100644 --- a/serverless/pages/clients-ruby-getting-started.asciidoc +++ b/serverless/pages/clients-ruby-getting-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ruby-client-getting-started]] = Get started with the serverless Ruby client -:description: Set up and use the Ruby client for {es3}. -:keywords: serverless, elasticsearch, ruby, how to +// :description: Set up and use the Ruby client for {es3}. 
+// :keywords: serverless, elasticsearch, ruby, how to preview:[] diff --git a/serverless/pages/clients.asciidoc b/serverless/pages/clients.asciidoc index df73b01804..8f26ec9ff0 100644 --- a/serverless/pages/clients.asciidoc +++ b/serverless/pages/clients.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-clients]] = Client libraries -:description: Index, search, and manage {es} data in your preferred language. -:keywords: serverless, elasticsearch, clients, overview +// :description: Index, search, and manage {es} data in your preferred language. +// :keywords: serverless, elasticsearch, clients, overview preview:[] diff --git a/serverless/pages/cloud-regions.asciidoc b/serverless/pages/cloud-regions.asciidoc index 76acebb40b..569845a222 100644 --- a/serverless/pages/cloud-regions.asciidoc +++ b/serverless/pages/cloud-regions.asciidoc @@ -1,8 +1,8 @@ [[regions]] = Serverless regions -:description: Index, search, and manage {es} data in your preferred language. -:keywords: serverless, regions, aws, cloud +// :description: Index, search, and manage {es} data in your preferred language. +// :keywords: serverless, regions, aws, cloud A region is the geographic area where the data center of the cloud provider that hosts your project is located. Review the available Elastic Cloud Serverless regions to decide which region to use. If you aren't sure which region to pick, choose one that is geographically close to you to reduce latency. diff --git a/serverless/pages/custom-roles.asciidoc b/serverless/pages/custom-roles.asciidoc index 1dab2709f5..041205c18f 100644 --- a/serverless/pages/custom-roles.asciidoc +++ b/serverless/pages/custom-roles.asciidoc @@ -1,8 +1,8 @@ [[custom-roles]] = Custom roles -:description: Create and manage roles that grant privileges within your project. -:keywords: serverless, Elasticsearch, Security +// :description: Create and manage roles that grant privileges within your project. 
+// :keywords: serverless, Elasticsearch, Security ifndef::serverlessCustomRoles[] coming:[] diff --git a/serverless/pages/data-views.asciidoc b/serverless/pages/data-views.asciidoc index 870e60ac9e..45769891c7 100644 --- a/serverless/pages/data-views.asciidoc +++ b/serverless/pages/data-views.asciidoc @@ -1,8 +1,8 @@ [[data-views]] = {data-sources-cap} -:description: Elastic requires a {data-source} to access the {es} data that you want to explore. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Elastic requires a {data-source} to access the {es} data that you want to explore. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/debug-grok-expressions.asciidoc b/serverless/pages/debug-grok-expressions.asciidoc index 0624bdba93..df257d94bd 100644 --- a/serverless/pages/debug-grok-expressions.asciidoc +++ b/serverless/pages/debug-grok-expressions.asciidoc @@ -1,8 +1,8 @@ [[devtools-debug-grok-expressions]] = Grok Debugger -:description: Build and debug grok patterns before you use them in your data processing pipelines. -:keywords: serverless, dev tools, how-to +// :description: Build and debug grok patterns before you use them in your data processing pipelines. +// :keywords: serverless, dev tools, how-to preview:[] diff --git a/serverless/pages/debug-painless-scripts.asciidoc b/serverless/pages/debug-painless-scripts.asciidoc index 5e96e536be..045aaaa782 100644 --- a/serverless/pages/debug-painless-scripts.asciidoc +++ b/serverless/pages/debug-painless-scripts.asciidoc @@ -1,8 +1,8 @@ [[devtools-debug-painless-scripts]] = Painless Lab -:description: Use our interactive code editor to test and debug Painless scripts in real-time. -:keywords: serverless, dev tools, how-to +// :description: Use our interactive code editor to test and debug Painless scripts in real-time. 
+// :keywords: serverless, dev tools, how-to preview:[] diff --git a/serverless/pages/developer-tools-troubleshooting.asciidoc b/serverless/pages/developer-tools-troubleshooting.asciidoc index a7c09a63cd..e79a649e8b 100644 --- a/serverless/pages/developer-tools-troubleshooting.asciidoc +++ b/serverless/pages/developer-tools-troubleshooting.asciidoc @@ -1,8 +1,8 @@ [[devtools-dev-tools-troubleshooting]] = Troubleshooting -:description: Troubleshoot searches. -:keywords: serverless, troubleshooting +// :description: Troubleshoot searches. +// :keywords: serverless, troubleshooting preview:[] diff --git a/serverless/pages/elasticsearch-developer-tools.asciidoc b/serverless/pages/elasticsearch-developer-tools.asciidoc index 41b5277626..1496cc21ff 100644 --- a/serverless/pages/elasticsearch-developer-tools.asciidoc +++ b/serverless/pages/elasticsearch-developer-tools.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-dev-tools]] = Developer tools -:description: Elastic tools for developers. -:keywords: serverless, elasticsearch, overview +// :description: Elastic tools for developers. +// :keywords: serverless, elasticsearch, overview preview:[] diff --git a/serverless/pages/explore-your-data-alerting.asciidoc b/serverless/pages/explore-your-data-alerting.asciidoc index c055cef4b7..d83297b4ae 100644 --- a/serverless/pages/explore-your-data-alerting.asciidoc +++ b/serverless/pages/explore-your-data-alerting.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-explore-your-data-alerting]] = Manage alerting rules -:description: Define when to generate alerts and notifications with alerting rules. -:keywords: serverless, elasticsearch, alerting, how-to +// :description: Define when to generate alerts and notifications with alerting rules. 
+// :keywords: serverless, elasticsearch, alerting, how-to ++++ Alerts diff --git a/serverless/pages/explore-your-data-discover-your-data.asciidoc b/serverless/pages/explore-your-data-discover-your-data.asciidoc index ba24c07299..bfa86933cf 100644 --- a/serverless/pages/explore-your-data-discover-your-data.asciidoc +++ b/serverless/pages/explore-your-data-discover-your-data.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-explore-your-data-discover-your-data]] = Discover your data -:description: Learn how to use Discover to gain insights into your data. -:keywords: serverless, elasticsearch, discover data, how to +// :description: Learn how to use Discover to gain insights into your data. +// :keywords: serverless, elasticsearch, discover data, how to preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-classify-text.asciidoc b/serverless/pages/explore-your-data-ml-nlp-classify-text.asciidoc index 64066dd208..e4928c9676 100644 --- a/serverless/pages/explore-your-data-ml-nlp-classify-text.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-classify-text.asciidoc @@ -1,7 +1,7 @@ = Classify text -:description: NLP tasks that classify input text or determine the language of text. -:keywords: serverless, elasticsearch, tbd +// :description: NLP tasks that classify input text or determine the language of text. 
+// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-deploy-model.asciidoc b/serverless/pages/explore-your-data-ml-nlp-deploy-model.asciidoc index dc71f9c26c..c45fae60b4 100644 --- a/serverless/pages/explore-your-data-ml-nlp-deploy-model.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-deploy-model.asciidoc @@ -1,6 +1,6 @@ = Deploy the model in your cluster -:description: Description to be written +// :description: Description to be written preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.asciidoc b/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.asciidoc index dc7b548682..e4fc0a8f3d 100644 --- a/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-deploy-trained-models.asciidoc @@ -1,7 +1,7 @@ = Deploy trained models -:description: You can import trained models into your cluster and configure them for specific NLP tasks. -:keywords: serverless, elasticsearch, tbd +// :description: You can import trained models into your cluster and configure them for specific NLP tasks. +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-elser.asciidoc b/serverless/pages/explore-your-data-ml-nlp-elser.asciidoc index 24a55adbb6..ef2312da40 100644 --- a/serverless/pages/explore-your-data-ml-nlp-elser.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-elser.asciidoc @@ -1,7 +1,7 @@ = ELSER – Elastic Learned Sparse EncodeR -:description: ELSER is a learned sparse ranking model trained by Elastic. -:keywords: serverless, elasticsearch, tbd +// :description: ELSER is a learned sparse ranking model trained by Elastic. 
+// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-examples.asciidoc b/serverless/pages/explore-your-data-ml-nlp-examples.asciidoc index 8ee76afffb..304d9c446f 100644 --- a/serverless/pages/explore-your-data-ml-nlp-examples.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-examples.asciidoc @@ -1,6 +1,6 @@ = Examples -:description: Description to be written +// :description: Description to be written preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-extract-info.asciidoc b/serverless/pages/explore-your-data-ml-nlp-extract-info.asciidoc index 5b63e0484a..80cfb8c7b4 100644 --- a/serverless/pages/explore-your-data-ml-nlp-extract-info.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-extract-info.asciidoc @@ -1,7 +1,7 @@ = Extract information -:description: NLP tasks that extract information from unstructured text. -:keywords: serverless, elasticsearch, tbd +// :description: NLP tasks that extract information from unstructured text. 
+// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-import-model.asciidoc b/serverless/pages/explore-your-data-ml-nlp-import-model.asciidoc index b08fdcd82b..45887244b2 100644 --- a/serverless/pages/explore-your-data-ml-nlp-import-model.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-import-model.asciidoc @@ -1,6 +1,6 @@ = Import the trained model and vocabulary -:keywords: serverless, elasticsearch, tbd +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-inference.asciidoc b/serverless/pages/explore-your-data-ml-nlp-inference.asciidoc index bc643e6bea..6095f0ca1f 100644 --- a/serverless/pages/explore-your-data-ml-nlp-inference.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-inference.asciidoc @@ -1,7 +1,7 @@ = Add NLP {infer} to ingest pipelines -:description: You can import trained models into your cluster and configure them for specific NLP tasks. -:keywords: serverless, elasticsearch, tbd +// :description: You can import trained models into your cluster and configure them for specific NLP tasks. +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc b/serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc index a92e421baa..e389d91fa1 100644 --- a/serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-lang-ident.asciidoc @@ -1,7 +1,7 @@ = Language identification -:description: Language identification is an NLP task and a model that enables you to determine the language of text. -:keywords: serverless, elasticsearch, tbd +// :description: Language identification is an NLP task and a model that enables you to determine the language of text. 
+// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc b/serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc index 3832421e5b..8eb65c1f72 100644 --- a/serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-model-ref.asciidoc @@ -1,7 +1,7 @@ = Compatible third party NLP models -:description: The list of compatible third party NLP models. -:keywords: ml, reference, analyze +// :description: The list of compatible third party NLP models. +// :keywords: ml, reference, analyze preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-ner-example.asciidoc b/serverless/pages/explore-your-data-ml-nlp-ner-example.asciidoc index 4fbd454c17..3543d8dcde 100644 --- a/serverless/pages/explore-your-data-ml-nlp-ner-example.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-ner-example.asciidoc @@ -1,6 +1,6 @@ = How to deploy named entity recognition -:description: Description to be written +// :description: Description to be written preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-ootb-models.asciidoc b/serverless/pages/explore-your-data-ml-nlp-ootb-models.asciidoc index f92145c1e3..5f0713a42f 100644 --- a/serverless/pages/explore-your-data-ml-nlp-ootb-models.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-ootb-models.asciidoc @@ -1,7 +1,7 @@ = Elastic trained models -:description: Models trained and provided by Elastic -:keywords: serverless, elasticsearch, tbd +// :description: Models trained and provided by Elastic +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-search-compare.asciidoc b/serverless/pages/explore-your-data-ml-nlp-search-compare.asciidoc index 5f5cdeead6..0b16c7cb7c 100644 --- a/serverless/pages/explore-your-data-ml-nlp-search-compare.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-search-compare.asciidoc @@ -1,7 
+1,7 @@ = Search and compare text -:description: NLP tasks for generate embeddings which can be used to search in text or compare different peieces of text. -:keywords: serverless, elasticsearch, tbd +// :description: NLP tasks for generating embeddings which can be used to search in text or compare different pieces of text. +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-select-model.asciidoc b/serverless/pages/explore-your-data-ml-nlp-select-model.asciidoc index feaa640a42..b02dafa43b 100644 --- a/serverless/pages/explore-your-data-ml-nlp-select-model.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-select-model.asciidoc @@ -1,6 +1,6 @@ = Select a trained model -:keywords: serverless, elasticsearch, tbd +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-test-inference.asciidoc b/serverless/pages/explore-your-data-ml-nlp-test-inference.asciidoc index 50f7b25d66..39f93ffecd 100644 --- a/serverless/pages/explore-your-data-ml-nlp-test-inference.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-test-inference.asciidoc @@ -1,7 +1,7 @@ = Try it out -:description: You can import trained models into your cluster and configure them for specific NLP tasks. -:keywords: serverless, elasticsearch, tbd +// :description: You can import trained models into your cluster and configure them for specific NLP tasks. 
+// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.asciidoc b/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.asciidoc index e202e9c4d3..2fede73c04 100644 --- a/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp-text-embedding-example.asciidoc @@ -1,6 +1,6 @@ = How to deploy a text embedding model and use it for semantic search -:description: Description to be written +// :description: Description to be written preview:[] diff --git a/serverless/pages/explore-your-data-ml-nlp.asciidoc b/serverless/pages/explore-your-data-ml-nlp.asciidoc index 3e7f3869dc..7d2ac257ab 100644 --- a/serverless/pages/explore-your-data-ml-nlp.asciidoc +++ b/serverless/pages/explore-your-data-ml-nlp.asciidoc @@ -1,6 +1,6 @@ = Machine Learning - Natural Language Processing -:keywords: serverless, elasticsearch, tbd +// :keywords: serverless, elasticsearch, tbd preview:[] diff --git a/serverless/pages/explore-your-data-the-aggregations-api.asciidoc b/serverless/pages/explore-your-data-the-aggregations-api.asciidoc index 8449d71dfd..502994c062 100644 --- a/serverless/pages/explore-your-data-the-aggregations-api.asciidoc +++ b/serverless/pages/explore-your-data-the-aggregations-api.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-explore-your-data-aggregations]] = Aggregations -:description: Aggregate and summarize your {es} data. -:keywords: serverless, elasticsearch, aggregations, reference +// :description: Aggregate and summarize your {es} data. 
+// :keywords: serverless, elasticsearch, aggregations, reference preview:[] diff --git a/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc b/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc index 6c3f7e5155..ad5bebd488 100644 --- a/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc +++ b/serverless/pages/explore-your-data-visualize-your-data-create-dashboards.asciidoc @@ -1,8 +1,8 @@ [role="exclude",id="elasticsearch-explore-your-data-dashboards"] = Create dashboards -:description: Create dashboards to visualize and monitor your {es} data. -:keywords: serverless, elasticsearch, dashboards, how to +// :description: Create dashboards to visualize and monitor your {es} data. +// :keywords: serverless, elasticsearch, dashboards, how to preview:[] diff --git a/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc b/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc index da47b2c2b4..ad1506acc5 100644 --- a/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc +++ b/serverless/pages/explore-your-data-visualize-your-data-create-visualizations.asciidoc @@ -1,8 +1,8 @@ [role="exclude",id="elasticsearch-explore-your-data-visualizations"] = Create visualizations -:description: Create charts, graphs, maps, and more from your {es} data. -:keywords: serverless, elasticsearch, visualize, how to +// :description: Create charts, graphs, maps, and more from your {es} data. 
+// :keywords: serverless, elasticsearch, visualize, how to preview:[] diff --git a/serverless/pages/explore-your-data-visualize-your-data.asciidoc b/serverless/pages/explore-your-data-visualize-your-data.asciidoc index 8c8ae1b5ed..8aabda1783 100644 --- a/serverless/pages/explore-your-data-visualize-your-data.asciidoc +++ b/serverless/pages/explore-your-data-visualize-your-data.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-explore-your-data-visualize-your-data]] = Visualize your data -:description: Build dynamic dashboards and visualizations for your {es} data. -:keywords: serverless, elasticsearch, visualize, how to +// :description: Build dynamic dashboards and visualizations for your {es} data. +// :keywords: serverless, elasticsearch, visualize, how to preview:[] diff --git a/serverless/pages/explore-your-data.asciidoc b/serverless/pages/explore-your-data.asciidoc index aea1310171..ecaa0dd693 100644 --- a/serverless/pages/explore-your-data.asciidoc +++ b/serverless/pages/explore-your-data.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-explore-your-data]] = Explore your data -:description: Turn {es} data into actionable insights with aggregations, visualizations, and alerts -:keywords: serverless, elasticsearch, explore, overview +// :description: Turn {es} data into actionable insights with aggregations, visualizations, and alerts +// :keywords: serverless, elasticsearch, explore, overview preview:[] diff --git a/serverless/pages/files.asciidoc b/serverless/pages/files.asciidoc index a0b9d90734..3e693187f2 100644 --- a/serverless/pages/files.asciidoc +++ b/serverless/pages/files.asciidoc @@ -1,8 +1,8 @@ [[files]] = {files-app} -:description: Manage files that are stored in Elastic. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Manage files that are stored in Elastic. 
+// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/fleet-and-elastic-agent.asciidoc b/serverless/pages/fleet-and-elastic-agent.asciidoc index f5162543af..ae57b48bf8 100644 --- a/serverless/pages/fleet-and-elastic-agent.asciidoc +++ b/serverless/pages/fleet-and-elastic-agent.asciidoc @@ -1,8 +1,8 @@ [[fleet-and-elastic-agent]] = Fleet and Elastic Agent -:description: Centrally manage your Elastic Agents in Fleet -:keywords: serverless, ingest, fleet, elastic agent +// :description: Centrally manage your Elastic Agents in Fleet +// :keywords: serverless, ingest, fleet, elastic agent preview:[] diff --git a/serverless/pages/general-developer-tools.asciidoc b/serverless/pages/general-developer-tools.asciidoc index b13659d57b..02e4f842ad 100644 --- a/serverless/pages/general-developer-tools.asciidoc +++ b/serverless/pages/general-developer-tools.asciidoc @@ -1,5 +1,5 @@ -:description: Use our developer tools to interact with your data. -:keywords: serverless, dev tools, overview +// :description: Use our developer tools to interact with your data. 
+// :keywords: serverless, dev tools, overview preview:[] diff --git a/serverless/pages/get-started.asciidoc b/serverless/pages/get-started.asciidoc index 7433ab7ed8..6da3cd9f79 100644 --- a/serverless/pages/get-started.asciidoc +++ b/serverless/pages/get-started.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-get-started]] = Get started -:description: Get started with {es3} in a few steps -:keywords: serverless, elasticsearch, getstarted, overview +// :description: Get started with {es3} in a few steps +// :keywords: serverless, elasticsearch, getstarted, overview preview:[] diff --git a/serverless/pages/index-management.asciidoc b/serverless/pages/index-management.asciidoc index bb19e30b5f..49eea272ed 100644 --- a/serverless/pages/index-management.asciidoc +++ b/serverless/pages/index-management.asciidoc @@ -1,8 +1,8 @@ [[index-management]] = Index management -:description: Perform CRUD operations on indices and data streams. View index settings, mappings, and statistics. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Perform CRUD operations on indices and data streams. View index settings, mappings, and statistics. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/ingest-pipelines.asciidoc b/serverless/pages/ingest-pipelines.asciidoc index 2f49829487..a4a6cb470e 100644 --- a/serverless/pages/ingest-pipelines.asciidoc +++ b/serverless/pages/ingest-pipelines.asciidoc @@ -1,8 +1,8 @@ [[ingest-pipelines]] = {ingest-pipelines-cap} -:description: Create and manage {ingest-pipelines} to perform common transformations and enrichments on your data. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Create and manage {ingest-pipelines} to perform common transformations and enrichments on your data. 
+// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/ingest-your-data-ingest-data-through-api.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-api.asciidoc index 4844b3abb9..da8389fc6b 100644 --- a/serverless/pages/ingest-your-data-ingest-data-through-api.asciidoc +++ b/serverless/pages/ingest-your-data-ingest-data-through-api.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ingest-data-through-api]] = Ingest data through API -:description: Add data to {es} using HTTP APIs or a language client. -:keywords: serverless, elasticsearch, ingest, api, how to +// :description: Add data to {es} using HTTP APIs or a language client. +// :keywords: serverless, elasticsearch, ingest, api, how to preview:[] diff --git a/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc index 42d05a7a6d..d9af7cc1e6 100644 --- a/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc +++ b/serverless/pages/ingest-your-data-ingest-data-through-integrations-beats.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ingest-data-through-beats]] = Beats -:description: Use {beats} to ship operational data to {es}. -:keywords: serverless, elasticsearch, ingest, beats, how to +// :description: Use {beats} to ship operational data to {es}. 
+// :keywords: serverless, elasticsearch, ingest, beats, how to preview:[] diff --git a/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc index af38aa4d24..f3118a4aef 100644 --- a/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc +++ b/serverless/pages/ingest-your-data-ingest-data-through-integrations-connector-client.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ingest-data-through-integrations-connector-client]] = Connector clients -:description: Set up and deploy self-managed connectors that run on your own infrastructure. -:keywords: serverless, elasticsearch, ingest, connector, how to +// :description: Set up and deploy self-managed connectors that run on your own infrastructure. +// :keywords: serverless, elasticsearch, ingest, connector, how to [NOTE] ==== diff --git a/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc b/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc index c4d644cd37..8bf1a060b8 100644 --- a/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc +++ b/serverless/pages/ingest-your-data-ingest-data-through-integrations-logstash.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ingest-data-through-logstash]] = Logstash -:description: Use {ls} to ship data to {es}. -:keywords: serverless, elasticsearch, ingest, logstash, how to +// :description: Use {ls} to ship data to {es}. 
+// :keywords: serverless, elasticsearch, ingest, logstash, how to preview:[] diff --git a/serverless/pages/ingest-your-data-upload-file.asciidoc b/serverless/pages/ingest-your-data-upload-file.asciidoc index 2f9daab4a6..834a79799b 100644 --- a/serverless/pages/ingest-your-data-upload-file.asciidoc +++ b/serverless/pages/ingest-your-data-upload-file.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ingest-data-file-upload]] = Upload a file -:description: Add data to {es} using the File Uploader. -:keywords: serverless, elasticsearch, ingest, how to +// :description: Add data to {es} using the File Uploader. +// :keywords: serverless, elasticsearch, ingest, how to preview:[] diff --git a/serverless/pages/ingest-your-data.asciidoc b/serverless/pages/ingest-your-data.asciidoc index 585c9d57a9..e2b56d2114 100644 --- a/serverless/pages/ingest-your-data.asciidoc +++ b/serverless/pages/ingest-your-data.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-ingest-your-data]] = Ingest your data -:description: Add data to your {es} project. -:keywords: serverless, elasticsearch, ingest, overview +// :description: Add data to your {es} project. +// :keywords: serverless, elasticsearch, ingest, overview preview:[] diff --git a/serverless/pages/integrations.asciidoc b/serverless/pages/integrations.asciidoc index 85a39014ab..8b4425b62a 100644 --- a/serverless/pages/integrations.asciidoc +++ b/serverless/pages/integrations.asciidoc @@ -1,8 +1,8 @@ [[integrations]] = Integrations -:description: Use our pre-built integrations to connect your data to Elastic. -:keywords: serverless, ingest, integration +// :description: Use our pre-built integrations to connect your data to Elastic. 
+// :keywords: serverless, ingest, integration preview:[] diff --git a/serverless/pages/knn-search.asciidoc b/serverless/pages/knn-search.asciidoc index 587035ecb9..65d9777421 100644 --- a/serverless/pages/knn-search.asciidoc +++ b/serverless/pages/knn-search.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-knn-search]] = k-nearest neighbor (kNN) search -:description: Vector search with k-nearest neighbor (kNN). -:keywords: serverless, elasticsearch, search, vector, knn, ann +// :description: Vector search with k-nearest neighbor (kNN). +// :keywords: serverless, elasticsearch, search, vector, knn, ann preview:[] diff --git a/serverless/pages/logstash-pipelines.asciidoc b/serverless/pages/logstash-pipelines.asciidoc index 00a0db7e2f..7f2551cda8 100644 --- a/serverless/pages/logstash-pipelines.asciidoc +++ b/serverless/pages/logstash-pipelines.asciidoc @@ -1,8 +1,8 @@ [[logstash-pipelines]] = {ls-pipelines} -:description: Create, edit, and delete your {ls} pipeline configurations. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Create, edit, and delete your {ls} pipeline configurations. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/machine-learning.asciidoc b/serverless/pages/machine-learning.asciidoc index aa16218ad8..0d3839389b 100644 --- a/serverless/pages/machine-learning.asciidoc +++ b/serverless/pages/machine-learning.asciidoc @@ -1,8 +1,8 @@ [[machine-learning]] = {ml-cap} -:description: View, export, and import {ml} jobs and models. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: View, export, and import {ml} jobs and models. 
+// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/maintenance-windows.asciidoc b/serverless/pages/maintenance-windows.asciidoc index 0440a3e325..38d0eb3840 100644 --- a/serverless/pages/maintenance-windows.asciidoc +++ b/serverless/pages/maintenance-windows.asciidoc @@ -1,8 +1,8 @@ [[maintenance-windows]] = {maint-windows-cap} -:description: Suppress rule notifications for scheduled periods of time. -:keywords: serverless, Observability, Security +// :description: Suppress rule notifications for scheduled periods of time. +// :keywords: serverless, Observability, Security preview:[] diff --git a/serverless/pages/manage-access-to-org-from-existing-account.asciidoc b/serverless/pages/manage-access-to-org-from-existing-account.asciidoc index 0c5f77fca7..20607b9e16 100644 --- a/serverless/pages/manage-access-to-org-from-existing-account.asciidoc +++ b/serverless/pages/manage-access-to-org-from-existing-account.asciidoc @@ -1,8 +1,8 @@ [[general-join-organization-from-existing-cloud-account]] = Join an organization from an existing Elastic Cloud account -:description: Join a new organization and bring over your projects. -:keywords: serverless, general, organization, join, how to +// :description: Join a new organization and bring over your projects. +// :keywords: serverless, general, organization, join, how to preview:[] diff --git a/serverless/pages/manage-access-to-org-user-roles.asciidoc b/serverless/pages/manage-access-to-org-user-roles.asciidoc index 9c632a7f96..e236d53ba1 100644 --- a/serverless/pages/manage-access-to-org-user-roles.asciidoc +++ b/serverless/pages/manage-access-to-org-user-roles.asciidoc @@ -1,8 +1,8 @@ [[general-assign-user-roles]] = Assign user roles and privileges -:description: Manage the predefined set of roles and privileges for all your projects. 
-:keywords: serverless, general, organization, roles, how to +// :description: Manage the predefined set of roles and privileges for all your projects. +// :keywords: serverless, general, organization, roles, how to preview:[] diff --git a/serverless/pages/manage-access-to-org.asciidoc b/serverless/pages/manage-access-to-org.asciidoc index 2423b0f0e5..b0d0108c61 100644 --- a/serverless/pages/manage-access-to-org.asciidoc +++ b/serverless/pages/manage-access-to-org.asciidoc @@ -1,8 +1,8 @@ [[general-manage-access-to-organization]] = Invite your team -:description: Add members to your organization and projects. -:keywords: serverless, general, organization, overview +// :description: Add members to your organization and projects. +// :keywords: serverless, general, organization, overview To allow other users to interact with your projects, you must invite them to join your organization and grant them access to your organization resources and instances. diff --git a/serverless/pages/manage-billing-check-subscription.asciidoc b/serverless/pages/manage-billing-check-subscription.asciidoc index e03953bbb8..fe61ee1c66 100644 --- a/serverless/pages/manage-billing-check-subscription.asciidoc +++ b/serverless/pages/manage-billing-check-subscription.asciidoc @@ -1,8 +1,8 @@ [[general-check-subscription]] = Check your subscription -:description: Manage your account details and subscription level. -:keywords: serverless, general, billing, subscription +// :description: Manage your account details and subscription level. +// :keywords: serverless, general, billing, subscription preview:[] diff --git a/serverless/pages/manage-billing-history.asciidoc b/serverless/pages/manage-billing-history.asciidoc index d4eb192ad2..d65f3d5f50 100644 --- a/serverless/pages/manage-billing-history.asciidoc +++ b/serverless/pages/manage-billing-history.asciidoc @@ -1,8 +1,8 @@ [[general-billing-history]] = Check your billing history -:description: Monitor payments and billing receipts. 
-:keywords: serverless, general, billing, history +// :description: Monitor payments and billing receipts. +// :keywords: serverless, general, billing, history preview:[] diff --git a/serverless/pages/manage-billing-monitor-usage.asciidoc b/serverless/pages/manage-billing-monitor-usage.asciidoc index 065bf585e9..597c325f93 100644 --- a/serverless/pages/manage-billing-monitor-usage.asciidoc +++ b/serverless/pages/manage-billing-monitor-usage.asciidoc @@ -1,8 +1,8 @@ [[general-monitor-usage]] = Monitor your account usage -:description: Check the usage breakdown of your account. -:keywords: serverless, general, billing, usage +// :description: Check the usage breakdown of your account. +// :keywords: serverless, general, billing, usage preview:[] diff --git a/serverless/pages/manage-billing-pricing-model.asciidoc b/serverless/pages/manage-billing-pricing-model.asciidoc index 0da3fd6a08..5dfd818536 100644 --- a/serverless/pages/manage-billing-pricing-model.asciidoc +++ b/serverless/pages/manage-billing-pricing-model.asciidoc @@ -1,8 +1,8 @@ [[general-serverless-billing]] = Serverless billing dimensions -:description: Understand how usage affects serverless pricing. -:keywords: serverless, general, billing, pricing model +// :description: Understand how usage affects serverless pricing. +// :keywords: serverless, general, billing, pricing model preview:[] diff --git a/serverless/pages/manage-billing-stop-project.asciidoc b/serverless/pages/manage-billing-stop-project.asciidoc index 1b3d83546b..6500ac3d7e 100644 --- a/serverless/pages/manage-billing-stop-project.asciidoc +++ b/serverless/pages/manage-billing-stop-project.asciidoc @@ -1,8 +1,8 @@ [[general-billing-stop-project]] = Stop charges for a project -:description: How to stop charges for a project. -:keywords: serverless, general, billing +// :description: How to stop charges for a project. 
+// :keywords: serverless, general, billing preview:[] diff --git a/serverless/pages/manage-billing.asciidoc b/serverless/pages/manage-billing.asciidoc index 295b3e6e6f..101903e915 100644 --- a/serverless/pages/manage-billing.asciidoc +++ b/serverless/pages/manage-billing.asciidoc @@ -1,8 +1,8 @@ [[general-manage-billing]] = Manage billing of your organization -:description: Configure the billing details of your organization. -:keywords: serverless, general, billing, overview +// :description: Configure the billing details of your organization. +// :keywords: serverless, general, billing, overview ++++ Manage billing diff --git a/serverless/pages/manage-org.asciidoc b/serverless/pages/manage-org.asciidoc index 792f2607bf..a74ca77d27 100644 --- a/serverless/pages/manage-org.asciidoc +++ b/serverless/pages/manage-org.asciidoc @@ -1,8 +1,8 @@ [[general-manage-organization]] = Manage your organization -:description: Manage your instances, users, and settings. -:keywords: serverless, general, organization, overview +// :description: Manage your instances, users, and settings. +// :keywords: serverless, general, organization, overview preview:[] diff --git a/serverless/pages/manage-your-project-rest-api.asciidoc b/serverless/pages/manage-your-project-rest-api.asciidoc index b3c1336520..d32f5af62a 100644 --- a/serverless/pages/manage-your-project-rest-api.asciidoc +++ b/serverless/pages/manage-your-project-rest-api.asciidoc @@ -1,8 +1,8 @@ [[general-manage-project-with-api]] = Using the Project Management REST API -:description: Manage your organization's serverless projects using the REST API. -:keywords: serverless, project, manage, rest, api +// :description: Manage your organization's serverless projects using the REST API. 
+// :keywords: serverless, project, manage, rest, api preview:[] diff --git a/serverless/pages/manage-your-project.asciidoc b/serverless/pages/manage-your-project.asciidoc index 77da7f1ce1..7744ace3d1 100644 --- a/serverless/pages/manage-your-project.asciidoc +++ b/serverless/pages/manage-your-project.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-manage-project]] = Manage your projects -:description: Configure project-wide features and usage. -:keywords: serverless, elasticsearch, project, manage +// :description: Configure project-wide features and usage. +// :keywords: serverless, elasticsearch, project, manage preview:[] diff --git a/serverless/pages/maps.asciidoc b/serverless/pages/maps.asciidoc index 7785d68614..1da4049f57 100644 --- a/serverless/pages/maps.asciidoc +++ b/serverless/pages/maps.asciidoc @@ -1,8 +1,8 @@ [[maps]] = {maps-app} -:description: Create maps from your geographical data. -:keywords: serverless, Security +// :description: Create maps from your geographical data. +// :keywords: serverless, Security preview:[] diff --git a/serverless/pages/pricing.asciidoc b/serverless/pages/pricing.asciidoc index f06b57165f..34a35d9295 100644 --- a/serverless/pages/pricing.asciidoc +++ b/serverless/pages/pricing.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-billing]] = Elasticsearch billing dimensions -:description: Learn about how Elasticsearch usage affects pricing. -:keywords: serverless, elasticsearch, overview +// :description: Learn about how Elasticsearch usage affects pricing. 
+// :keywords: serverless, elasticsearch, overview preview:[] diff --git a/serverless/pages/profile-queries-and-aggregations.asciidoc b/serverless/pages/profile-queries-and-aggregations.asciidoc index 3fcddfb5cb..67f67f0d8d 100644 --- a/serverless/pages/profile-queries-and-aggregations.asciidoc +++ b/serverless/pages/profile-queries-and-aggregations.asciidoc @@ -1,8 +1,8 @@ [[devtools-profile-queries-and-aggregations]] = Search Profiler -:description: Diagnose and debug poorly performing search queries. -:keywords: serverless, dev tools, how-to +// :description: Diagnose and debug poorly performing search queries. +// :keywords: serverless, dev tools, how-to preview:[] diff --git a/serverless/pages/project-and-management-settings.asciidoc b/serverless/pages/project-and-management-settings.asciidoc index 64725d5d5b..262acb4fe3 100644 --- a/serverless/pages/project-and-management-settings.asciidoc +++ b/serverless/pages/project-and-management-settings.asciidoc @@ -1,5 +1,5 @@ -:description: Learn about capabilities available in multiple serverless solutions. -:keywords: serverless, observability, security, elasticsearch, overview +// :description: Learn about capabilities available in multiple serverless solutions. +// :keywords: serverless, observability, security, elasticsearch, overview preview:[] diff --git a/serverless/pages/project-settings.asciidoc b/serverless/pages/project-settings.asciidoc index dea1b6f52b..94aaf3564f 100644 --- a/serverless/pages/project-settings.asciidoc +++ b/serverless/pages/project-settings.asciidoc @@ -1,8 +1,8 @@ [[project-settings]] = Management settings -:description: Manage your indices, data views, saved objects, settings, and more from a central location in Elastic. -:keywords: serverless, management, overview +// :description: Manage your indices, data views, saved objects, settings, and more from a central location in Elastic. 
+// :keywords: serverless, management, overview ++++ Management diff --git a/serverless/pages/reports.asciidoc b/serverless/pages/reports.asciidoc index 505c38e051..533313c194 100644 --- a/serverless/pages/reports.asciidoc +++ b/serverless/pages/reports.asciidoc @@ -1,8 +1,8 @@ [[reports]] = {reports-app} -:description: View and manage generated reports. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: View and manage generated reports. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/rules.asciidoc b/serverless/pages/rules.asciidoc index 1b3dc2ff04..b8cf47bd88 100644 --- a/serverless/pages/rules.asciidoc +++ b/serverless/pages/rules.asciidoc @@ -1,8 +1,8 @@ [[rules]] = {rules-app} -:description: Alerting works by running checks on a schedule to detect conditions defined by a rule. -:keywords: serverless, Elasticsearch, alerting, learn +// :description: Alerting works by running checks on a schedule to detect conditions defined by a rule. +// :keywords: serverless, Elasticsearch, alerting, learn preview:[] diff --git a/serverless/pages/run-api-requests-in-the-console.asciidoc b/serverless/pages/run-api-requests-in-the-console.asciidoc index d17ed74dc2..eff6c45e9f 100644 --- a/serverless/pages/run-api-requests-in-the-console.asciidoc +++ b/serverless/pages/run-api-requests-in-the-console.asciidoc @@ -1,8 +1,8 @@ [[devtools-run-api-requests-in-the-console]] = Console -:description: Use the Console to interact with Elastic REST APIs. -:keywords: serverless, dev tools, how-to +// :description: Use the Console to interact with Elastic REST APIs. 
+// :keywords: serverless, dev tools, how-to preview:[] diff --git a/serverless/pages/saved-objects.asciidoc b/serverless/pages/saved-objects.asciidoc index f12290f4e8..bb8a4d7f8c 100644 --- a/serverless/pages/saved-objects.asciidoc +++ b/serverless/pages/saved-objects.asciidoc @@ -1,8 +1,8 @@ [[saved-objects]] = Saved objects -:description: Manage your saved objects, including dashboards, visualizations, maps, {data-sources}, and more. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Manage your saved objects, including dashboards, visualizations, maps, {data-sources}, and more. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/search-playground.asciidoc b/serverless/pages/search-playground.asciidoc index b93241827d..d9e986a920 100644 --- a/serverless/pages/search-playground.asciidoc +++ b/serverless/pages/search-playground.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-playground]] = Playground -:description: Test and edit Elasticsearch queries and chat with your data using LLMs. -:keywords: serverless, elasticsearch, search, playground, GenAI, LLMs +// :description: Test and edit Elasticsearch queries and chat with your data using LLMs. +// :keywords: serverless, elasticsearch, search, playground, GenAI, LLMs preview:[] diff --git a/serverless/pages/search-with-synonyms.asciidoc b/serverless/pages/search-with-synonyms.asciidoc index fd459c62a2..c38ea8e0df 100644 --- a/serverless/pages/search-with-synonyms.asciidoc +++ b/serverless/pages/search-with-synonyms.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-reference-search-with-synonyms]] = Full-text search with synonyms -:description: Use synonyms to search for words or phrases that have the same or similar meaning. -:keywords: serverless, elasticsearch, search, synonyms +// :description: Use synonyms to search for words or phrases that have the same or similar meaning. 
+// :keywords: serverless, elasticsearch, search, synonyms preview:[] diff --git a/serverless/pages/search-your-data-semantic-search-elser.asciidoc b/serverless/pages/search-your-data-semantic-search-elser.asciidoc index 186ff0242c..c14dc04698 100644 --- a/serverless/pages/search-your-data-semantic-search-elser.asciidoc +++ b/serverless/pages/search-your-data-semantic-search-elser.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-reference-semantic-search-elser]] = Tutorial: Semantic search with ELSER -:description: Perform semantic search using ELSER, an NLP model trained by Elastic. -:keywords: elasticsearch, elser, semantic search +// :description: Perform semantic search using ELSER, an NLP model trained by Elastic. +// :keywords: elasticsearch, elser, semantic search preview:[] diff --git a/serverless/pages/search-your-data-semantic-search.asciidoc b/serverless/pages/search-your-data-semantic-search.asciidoc index c4a6192a6b..c04f26e80c 100644 --- a/serverless/pages/search-your-data-semantic-search.asciidoc +++ b/serverless/pages/search-your-data-semantic-search.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-reference-semantic-search]] = Semantic search -:description: Find data based on the intent and contextual meaning of a search query with semantic search -:keywords: elasticsearch, elser, semantic search +// :description: Find data based on the intent and contextual meaning of a search query with semantic search +// :keywords: elasticsearch, elser, semantic search preview:[] diff --git a/serverless/pages/search-your-data-the-search-api.asciidoc b/serverless/pages/search-your-data-the-search-api.asciidoc index 2b04c4a460..1574332ae6 100644 --- a/serverless/pages/search-your-data-the-search-api.asciidoc +++ b/serverless/pages/search-your-data-the-search-api.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-search-your-data-the-search-api]] = The search API -:description: Run queries and aggregations with the search API. 
-:keywords: serverless, elasticsearch, API +// :description: Run queries and aggregations with the search API. +// :keywords: serverless, elasticsearch, API preview:[] diff --git a/serverless/pages/search-your-data.asciidoc b/serverless/pages/search-your-data.asciidoc index 7e873a6f09..b74c844cf1 100644 --- a/serverless/pages/search-your-data.asciidoc +++ b/serverless/pages/search-your-data.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-search-your-data]] = Search your data -:description: Use the search API to run queries on your data. -:keywords: serverless, elasticsearch, search +// :description: Use the search API to run queries on your data. +// :keywords: serverless, elasticsearch, search preview:[] diff --git a/serverless/pages/serverless-differences.asciidoc b/serverless/pages/serverless-differences.asciidoc index 2625abfb64..6deffb93ed 100644 --- a/serverless/pages/serverless-differences.asciidoc +++ b/serverless/pages/serverless-differences.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-differences]] = Differences from other Elasticsearch offerings -:description: Understand how serverless Elasticsearch differs from Elastic Cloud Hosted and self-managed offerings. -:keywords: serverless, elasticsearch +// :description: Understand how serverless Elasticsearch differs from Elastic Cloud Hosted and self-managed offerings. +// :keywords: serverless, elasticsearch ++++ Serverless differences diff --git a/serverless/pages/service-status.asciidoc b/serverless/pages/service-status.asciidoc index f132eaefb5..0e1610a86e 100644 --- a/serverless/pages/service-status.asciidoc +++ b/serverless/pages/service-status.asciidoc @@ -1,7 +1,7 @@ [[general-serverless-status]] = Monitor serverless status -:keywords: serverless +// :keywords: serverless Serverless projects run on cloud platforms, which may undergo changes in availability. When availability changes, Elastic makes sure to provide you with a current service status. 
diff --git a/serverless/pages/sign-up.asciidoc b/serverless/pages/sign-up.asciidoc index a2f9b972f4..f7a6b70e16 100644 --- a/serverless/pages/sign-up.asciidoc +++ b/serverless/pages/sign-up.asciidoc @@ -1,8 +1,8 @@ [[general-sign-up-trial]] = Get started with serverless -:description: Information about signing up for a serverless Elastic Cloud trial -:keywords: serverless, general, signup +// :description: Information about signing up for a serverless Elastic Cloud trial +// :keywords: serverless, general, signup There are two options to create serverless projects: diff --git a/serverless/pages/spaces.asciidoc b/serverless/pages/spaces.asciidoc index 969356d08d..929cff6f6c 100644 --- a/serverless/pages/spaces.asciidoc +++ b/serverless/pages/spaces.asciidoc @@ -1,7 +1,7 @@ [[spaces]] = Spaces -:description: Organize your project and objects into multiple spaces. +// :description: Organize your project and objects into multiple spaces. This content applies to: {es-badge} {obs-badge} {sec-badge} diff --git a/serverless/pages/tags.asciidoc b/serverless/pages/tags.asciidoc index 9652405b1a..88e161591f 100644 --- a/serverless/pages/tags.asciidoc +++ b/serverless/pages/tags.asciidoc @@ -1,8 +1,8 @@ [[tags]] = {tags-app} -:description: Use tags to categorize your saved objects, then filter for related objects based on shared tags. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Use tags to categorize your saved objects, then filter for related objects based on shared tags. 
+// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/technical-preview-limitations.asciidoc b/serverless/pages/technical-preview-limitations.asciidoc index 9b38795256..756ad4133c 100644 --- a/serverless/pages/technical-preview-limitations.asciidoc +++ b/serverless/pages/technical-preview-limitations.asciidoc @@ -1,8 +1,8 @@ [[elasticsearch-technical-preview-limitations]] = Technical preview limitations -:description: Review the limitations that apply to Elasticsearch projects. -:keywords: serverless, elasticsearch +// :description: Review the limitations that apply to Elasticsearch projects. +// :keywords: serverless, elasticsearch preview:[] diff --git a/serverless/pages/transforms.asciidoc b/serverless/pages/transforms.asciidoc index 94e79b2524..29e6187a23 100644 --- a/serverless/pages/transforms.asciidoc +++ b/serverless/pages/transforms.asciidoc @@ -1,8 +1,8 @@ [[transforms]] = {transforms-app} -:description: Use transforms to pivot existing indices into summarized or entity-centric indices. -:keywords: serverless, Elasticsearch, Observability, Security +// :description: Use transforms to pivot existing indices into summarized or entity-centric indices. +// :keywords: serverless, Elasticsearch, Observability, Security preview:[] diff --git a/serverless/pages/user-profile.asciidoc b/serverless/pages/user-profile.asciidoc index 041b29e53e..5d7a04f93c 100644 --- a/serverless/pages/user-profile.asciidoc +++ b/serverless/pages/user-profile.asciidoc @@ -1,8 +1,8 @@ [[general-user-profile]] = Update your user profile -:description: Manage your profile settings. -:keywords: serverless, general, profile, update +// :description: Manage your profile settings. 
+// :keywords: serverless, general, profile, update preview:[] diff --git a/serverless/pages/visualize-library.asciidoc b/serverless/pages/visualize-library.asciidoc index 8131c37e5a..a4be7d86bc 100644 --- a/serverless/pages/visualize-library.asciidoc +++ b/serverless/pages/visualize-library.asciidoc @@ -1,7 +1,7 @@ [role="exclude",id="visualize-library"] = Visualize Library -:keywords: serverless, Elasticsearch, Observability, Security +// :keywords: serverless, Elasticsearch, Observability, Security //// /* TODO: Figure out best way to deal with inconsistent location of these capabilities in different solutions. diff --git a/serverless/pages/what-is-elasticsearch-serverless.asciidoc b/serverless/pages/what-is-elasticsearch-serverless.asciidoc index 3281a97da7..61a8afc610 100644 --- a/serverless/pages/what-is-elasticsearch-serverless.asciidoc +++ b/serverless/pages/what-is-elasticsearch-serverless.asciidoc @@ -2,8 +2,8 @@ To be rewritten/refined //// -:description: Build search solutions and applications with {es}. -:keywords: serverless, elasticsearch, overview +// :description: Build search solutions and applications with {es}. +// :keywords: serverless, elasticsearch, overview preview:[] diff --git a/serverless/pages/what-is-serverless.asciidoc b/serverless/pages/what-is-serverless.asciidoc index 59e0be7db2..c14ea995b9 100644 --- a/serverless/pages/what-is-serverless.asciidoc +++ b/serverless/pages/what-is-serverless.asciidoc @@ -1,7 +1,7 @@ [[general-what-is-serverless-elastic]] = What is serverless Elastic? -:keywords: serverless +// :keywords: serverless Serverless projects use the core components of the {stack}, such as {es} and {kib}, and are based on https://www.elastic.co/blog/elastic-serverless-architecture[an architecture that decouples compute and storage]. Search and indexing operations are separated, which offers high flexibility for scaling your workloads while ensuring