From d5d01ce6490f78b0b1423c77f937d8d5c64a4584 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 23 Sep 2025 17:10:02 +0200 Subject: [PATCH 1/6] [E&A] Creates a page for EIS migration guide. --- explore-analyze/elastic-inference/ml-node-vs-eis.md | 11 +++++++++++ explore-analyze/toc.yml | 2 ++ 2 files changed, 13 insertions(+) create mode 100644 explore-analyze/elastic-inference/ml-node-vs-eis.md diff --git a/explore-analyze/elastic-inference/ml-node-vs-eis.md b/explore-analyze/elastic-inference/ml-node-vs-eis.md new file mode 100644 index 0000000000..7107cdab96 --- /dev/null +++ b/explore-analyze/elastic-inference/ml-node-vs-eis.md @@ -0,0 +1,11 @@ +--- +navigation_title: ML-nodes vs EIS +applies_to: + stack: ga + serverless: ga + deployment: + self: unavailable +--- + +# ML-nodes vs Elastic {{infer-cap}} Service (EIS) [ml-nodes-vs-eis] + diff --git a/explore-analyze/toc.yml b/explore-analyze/toc.yml index 2c4588f063..5d005df7f4 100644 --- a/explore-analyze/toc.yml +++ b/explore-analyze/toc.yml @@ -110,6 +110,8 @@ toc: - file: elastic-inference.md children: - file: elastic-inference/eis.md + children: + - hidden: elastic-inference/ml-node-vs-eis.md - file: elastic-inference/inference-api.md - file: machine-learning.md children: From c56ec4bdc9b6a775fac0c7e9a45c2941d1990584 Mon Sep 17 00:00:00 2001 From: Sean Handley Date: Tue, 4 Nov 2025 05:15:42 -0800 Subject: [PATCH 2/6] Add some Qs. 
--- .../elastic-inference/ml-node-vs-eis.md | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/explore-analyze/elastic-inference/ml-node-vs-eis.md b/explore-analyze/elastic-inference/ml-node-vs-eis.md index 7107cdab96..0d83e68f63 100644 --- a/explore-analyze/elastic-inference/ml-node-vs-eis.md +++ b/explore-analyze/elastic-inference/ml-node-vs-eis.md @@ -7,5 +7,46 @@ applies_to: self: unavailable --- -# ML-nodes vs Elastic {{infer-cap}} Service (EIS) [ml-nodes-vs-eis] +# Using ML-nodes or Elastic {{infer-cap}} Service (EIS) [ml-nodes-vs-eis] +## When to use EIS? + +The Elastic Inference Service (EIS) requires zero setup or management. It's always-on, has excellent ingest throughput, and uses simple token-based billing. + +You should use EIS if you're getting started with semantic/hybrid search and want a smooth experience. Under the hood, EIS uses GPUs for ML inference, which are far more efficient and allow a faster (and more cost-effective) experience for most usecases. + +## When to use ML nodes? + +ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute ML inference. ML nodes tend to incur higher costs but give more control. + +You should use ML nodes if you want to decide how your models run, you want to run custom models, or you have a self-managed setup. + +## How do I switch from using ML nodes to EIS on an existing index? 
+ +The below will work in serverless now, and everywhere else after 9.3: + +```console +PUT /my-ml-node-index/_mapping +{ + "properties": { + "text": { + "type": "semantic_text", + "inference_id": ".elser-2-elastic" + } + } +} +``` + +You can also switch an EIS-based index to use ML nodes: + +```console +PUT /my-eis-index/_mapping +{ + "properties": { + "text": { + "type": "semantic_text", + "inference_id": ".elser-2-elasticsearch" + } + } +} +``` \ No newline at end of file From 2d6327d3235235af880f69e77302b00b8ad1d77b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 5 Nov 2025 16:50:26 +0100 Subject: [PATCH 3/6] Edits. --- .../elastic-inference/ml-node-vs-eis.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/explore-analyze/elastic-inference/ml-node-vs-eis.md b/explore-analyze/elastic-inference/ml-node-vs-eis.md index 0d83e68f63..6a773d03ed 100644 --- a/explore-analyze/elastic-inference/ml-node-vs-eis.md +++ b/explore-analyze/elastic-inference/ml-node-vs-eis.md @@ -13,17 +13,20 @@ applies_to: The Elastic Inference Service (EIS) requires zero setup or management. It's always-on, has excellent ingest throughput, and uses simple token-based billing. -You should use EIS if you're getting started with semantic/hybrid search and want a smooth experience. Under the hood, EIS uses GPUs for ML inference, which are far more efficient and allow a faster (and more cost-effective) experience for most usecases. +Use EIS if you're getting started with [semantic search](./solutions/search/semantic-search.md) or [hybrid search](./solutions/search/hybrid-search-md) and want a smooth experience. Under the hood, EIS uses GPUs for ML {{infer}}, which are more efficient and allow a faster, more cost-effective experience for most usecases. -## When to use ML nodes? +## When to use {{ml}} nodes? 
-ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute ML inference. ML nodes tend to incur higher costs but give more control. +ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute [ML {{infer}}]((./explore-analyze/elastic-inference/inference-api.md)). {{ml-cap}} nodes tend to incur higher costs but give more control. -You should use ML nodes if you want to decide how your models run, you want to run custom models, or you have a self-managed setup. +Use ML nodes if you want to decide how your models run, you want to run custom models, or you have a self-managed setup. ## How do I switch from using ML nodes to EIS on an existing index? -The below will work in serverless now, and everywhere else after 9.3: +```{applies_to} +stack: ga 9.3 +serverless: ga +``` ```console PUT /my-ml-node-index/_mapping @@ -49,4 +52,4 @@ PUT /my-eis-index/_mapping } } } -``` \ No newline at end of file +``` From e12ce70f6148c1af4012d08289e3a167c3355275 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 5 Nov 2025 16:52:49 +0100 Subject: [PATCH 4/6] Fixes URLs. --- explore-analyze/elastic-inference/ml-node-vs-eis.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/explore-analyze/elastic-inference/ml-node-vs-eis.md b/explore-analyze/elastic-inference/ml-node-vs-eis.md index 6a773d03ed..c96073dcba 100644 --- a/explore-analyze/elastic-inference/ml-node-vs-eis.md +++ b/explore-analyze/elastic-inference/ml-node-vs-eis.md @@ -13,11 +13,11 @@ applies_to: The Elastic Inference Service (EIS) requires zero setup or management. It's always-on, has excellent ingest throughput, and uses simple token-based billing. -Use EIS if you're getting started with [semantic search](./solutions/search/semantic-search.md) or [hybrid search](./solutions/search/hybrid-search-md) and want a smooth experience. 
Under the hood, EIS uses GPUs for ML {{infer}}, which are more efficient and allow a faster, more cost-effective experience for most usecases. +Use EIS if you're getting started with [semantic search](/solutions/search/semantic-search.md) or [hybrid search](/solutions/search/hybrid-search-md) and want a smooth experience. Under the hood, EIS uses GPUs for ML {{infer}}, which are more efficient and allow a faster, more cost-effective experience for most usecases. ## When to use {{ml}} nodes? -ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute [ML {{infer}}]((./explore-analyze/elastic-inference/inference-api.md)). {{ml-cap}} nodes tend to incur higher costs but give more control. +ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute [ML {{infer}}]((/explore-analyze/elastic-inference/inference-api.md)). {{ml-cap}} nodes tend to incur higher costs but give more control. Use ML nodes if you want to decide how your models run, you want to run custom models, or you have a self-managed setup. From 861512139d75a2b7302d5f92cde592046e5c25a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 5 Nov 2025 16:55:29 +0100 Subject: [PATCH 5/6] Further edits. --- explore-analyze/elastic-inference/ml-node-vs-eis.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/explore-analyze/elastic-inference/ml-node-vs-eis.md b/explore-analyze/elastic-inference/ml-node-vs-eis.md index c96073dcba..4123b41b1c 100644 --- a/explore-analyze/elastic-inference/ml-node-vs-eis.md +++ b/explore-analyze/elastic-inference/ml-node-vs-eis.md @@ -13,11 +13,11 @@ applies_to: The Elastic Inference Service (EIS) requires zero setup or management. It's always-on, has excellent ingest throughput, and uses simple token-based billing. 
 
-Use EIS if you're getting started with [semantic search](/solutions/search/semantic-search.md) or [hybrid search](/solutions/search/hybrid-search-md) and want a smooth experience. Under the hood, EIS uses GPUs for ML {{infer}}, which are more efficient and allow a faster, more cost-effective experience for most usecases.
+Use EIS if you're getting started with [semantic search](/solutions/search/semantic-search.md) or [hybrid search](/solutions/search/hybrid-search.md) and want a smooth experience. Under the hood, EIS uses GPUs for ML {{infer}}, which are more efficient and allow a faster, more cost-effective experience for most use cases.
 
 ## When to use {{ml}} nodes?
 
-ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute [ML {{infer}}]((/explore-analyze/elastic-inference/inference-api.md)). {{ml-cap}} nodes tend to incur higher costs but give more control.
+ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute [ML {{infer}}](/explore-analyze/elastic-inference/inference-api.md). {{ml-cap}} nodes tend to incur higher costs but give more control.
 
 Use ML nodes if you want to decide how your models run, you want to run custom models, or you have a self-managed setup.
 

From 638334bdd0893e7ac27325a100c79c75a39d3963 Mon Sep 17 00:00:00 2001
From: Sean Handley
Date: Wed, 5 Nov 2025 08:17:12 -0800
Subject: [PATCH 6/6] Wording change for more positive framing.

---
 explore-analyze/elastic-inference/ml-node-vs-eis.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/explore-analyze/elastic-inference/ml-node-vs-eis.md b/explore-analyze/elastic-inference/ml-node-vs-eis.md
index 4123b41b1c..e50d5992d5 100644
--- a/explore-analyze/elastic-inference/ml-node-vs-eis.md
+++ b/explore-analyze/elastic-inference/ml-node-vs-eis.md
@@ -17,7 +17,7 @@ Use EIS if you're getting started with [semantic search](/solutions/search/seman
 
 ## When to use {{ml}} nodes?
 
-ML nodes are a more configurable solution than EIS where you can set up specific nodes using CPUs to execute [ML {{infer}}](/explore-analyze/elastic-inference/inference-api.md). {{ml-cap}} nodes tend to incur higher costs but give more control.
+ML nodes are a more configurable solution than EIS, where you can set up specific nodes using CPUs to execute [ML {{infer}}](/explore-analyze/elastic-inference/inference-api.md). {{ml-cap}} nodes tend to give more fine-grained control.
 
 Use ML nodes if you want to decide how your models run, you want to run custom models, or you have a self-managed setup.