From 7cc976580c24f7750ee40da0f459a410394db281 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 24 Mar 2025 09:13:12 -0700 Subject: [PATCH] Move inference examples to appropriate task types (#4049) (cherry picked from commit ae85ae34f0aa4147d6da8c90351e34c3fa42f1fe) --- output/openapi/elasticsearch-openapi.json | 56 +++++++++++++++++++ .../elasticsearch-serverless-openapi.json | 56 +++++++++++++++++++ output/schema/schema-serverless.json | 56 +++++++++++++++++++ output/schema/schema.json | 56 +++++++++++++++++++ .../request/CompletionRequestExample1.yaml} | 0 .../response/CompletionResponseExample1.yaml} | 0 .../request/RerankRequestExample1.yaml} | 0 .../response/RerankResponseExample1.yaml} | 0 .../SparseEmbeddingRequestExample1.yaml} | 0 .../SparseEmbeddingResponseExample1.yaml} | 0 .../TextEmbeddingRequestExample1.yaml} | 0 .../TextEmbeddingResponseExample1.yaml} | 0 12 files changed, 224 insertions(+) rename specification/inference/{inference/examples/request/InferenceRequestExample1.yaml => completion/examples/request/CompletionRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample1.yaml => completion/examples/response/CompletionResponseExample1.yaml} (100%) rename specification/inference/{inference/examples/request/InferenceRequestExample2.yaml => rerank/examples/request/RerankRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample2.yaml => rerank/examples/response/RerankResponseExample1.yaml} (100%) rename specification/inference/{inference/examples/request/InferenceRequestExample3.yaml => sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample3.yaml => sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml} (100%) rename specification/inference/{inference/examples/request/InferenceRequestExample4.yaml => text_embedding/examples/request/TextEmbeddingRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample4.yaml => text_embedding/examples/response/TextEmbeddingResponseExample1.yaml} (100%) diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json index d7c45ea928..dc31777de0 100644 --- a/output/openapi/elasticsearch-openapi.json +++ b/output/openapi/elasticsearch-openapi.json @@ -17607,6 +17607,13 @@ "required": [ "input" ] + }, + "examples": { + "CompletionRequestExample1": { + "summary": "Completion task", + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } } } } @@ -17618,6 +17625,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:CompletionInferenceResult" + }, + "examples": { + "CompletionResponseExample1": { + "summary": "Completion task", + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } } } } @@ -18203,6 +18217,13 @@ "query", "input" ] + }, + "examples": { + "RerankRequestExample1": { + "summary": "Rerank task", + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } } } } @@ -18214,6 +18235,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:RerankedInferenceResult" + }, + "examples": { + "RerankResponseExample1": { + "summary": "Rerank task", + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } } } } @@ -18279,6 +18307,13 @@ "required": [ "input" ] + }, + "examples": { + "SparseEmbeddingRequestExample1": { + "summary": "Sparse embedding task", + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } } } } @@ -18290,6 +18325,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:SparseEmbeddingInferenceResult" + }, + "examples": { + "SparseEmbeddingResponseExample1": { + "summary": "Sparse embedding task", + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } } } } @@ -18429,6 +18471,13 @@ "required": [ "input" ] + }, + "examples": { + "TextEmbeddingRequestExample1": { + "summary": "Text embedding task", + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } } } } @@ -18440,6 +18489,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:TextEmbeddingInferenceResult" + }, + "examples": { + "TextEmbeddingResponseExample1": { + "summary": "Text embedding task", + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "value": "{\n 
\"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } } } } diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json index 729211bdd2..6d53a256c9 100644 --- a/output/openapi/elasticsearch-serverless-openapi.json +++ b/output/openapi/elasticsearch-serverless-openapi.json @@ -9573,6 +9573,13 @@ "required": [ "input" ] + }, + "examples": { + "CompletionRequestExample1": { + "summary": "Completion task", + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } } } } @@ -9584,6 +9591,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:CompletionInferenceResult" + }, + "examples": { + "CompletionResponseExample1": { + "summary": "Completion task", + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } } } } @@ -10169,6 +10183,13 @@ "query", "input" ] + }, + "examples": { + "RerankRequestExample1": { + "summary": "Rerank task", + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } } } } @@ -10180,6 +10201,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:RerankedInferenceResult" + }, + "examples": { + "RerankResponseExample1": { + "summary": "Rerank task", + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } } } } @@ -10245,6 +10273,13 @@ "required": [ "input" ] + }, + "examples": { + "SparseEmbeddingRequestExample1": { + "summary": "Sparse embedding task", + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } } } } @@ -10256,6 +10291,13 @@ 
"application/json": { "schema": { "$ref": "#/components/schemas/inference._types:SparseEmbeddingInferenceResult" + }, + "examples": { + "SparseEmbeddingResponseExample1": { + "summary": "Sparse embedding task", + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } } } } @@ -10321,6 +10363,13 @@ "required": [ "input" ] + }, + "examples": { + "TextEmbeddingRequestExample1": { + "summary": "Text embedding task", + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } } } } @@ -10332,6 +10381,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:TextEmbeddingInferenceResult" + }, + "examples": { + "TextEmbeddingResponseExample1": { + "summary": "Text embedding task", + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } } } } diff --git a/output/schema/schema-serverless.json b/output/schema/schema-serverless.json index ebb73589b9..59f9264fa8 100644 --- a/output/schema/schema-serverless.json +++ b/output/schema/schema-serverless.json @@ -27119,6 +27119,13 @@ ] }, "description": "Perform completion inference on the service", + "examples": { + "CompletionRequestExample1": { + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "summary": "Completion task", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -27172,6 +27179,13 @@ } } }, + "examples": { + "CompletionResponseExample1": { + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "summary": "Completion task", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -27987,6 +28001,13 @@ ] }, "description": "Perform rereanking inference on the service", + "examples": { + "RerankRequestExample1": { + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "summary": "Rerank task", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -28040,6 +28061,13 @@ } } }, + "examples": { + "RerankResponseExample1": { + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "summary": "Rerank task", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -28096,6 +28124,13 @@ ] }, "description": "Perform sparse embedding inference on the service", + "examples": { + "SparseEmbeddingRequestExample1": { + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "summary": "Sparse embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -28149,6 +28184,13 @@ } } }, + "examples": { + "SparseEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "summary": "Sparse embedding task", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -28205,6 +28247,13 @@ ] }, "description": "Perform text embedding inference on the service", + "examples": { + "TextEmbeddingRequestExample1": { + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "summary": "Text embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -28258,6 +28307,13 @@ } } }, + "examples": { + "TextEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST 
_inference/text_embedding/my-cohere-endpoint`.\n", + "summary": "Text embedding task", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", diff --git a/output/schema/schema.json b/output/schema/schema.json index d51718d6f9..4fec83e820 100644 --- a/output/schema/schema.json +++ b/output/schema/schema.json @@ -150068,6 +150068,13 @@ ] }, "description": "Perform completion inference on the service", + "examples": { + "CompletionRequestExample1": { + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "summary": "Completion task", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -150121,6 +150128,13 @@ } } }, + "examples": { + "CompletionResponseExample1": { + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "summary": "Completion task", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.completion" @@ -151401,6 +151415,13 @@ ] }, "description": "Perform rereanking inference on the service", + "examples": { + "RerankRequestExample1": { + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "summary": "Rerank task", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151454,6 +151475,13 @@ } } }, + "examples": { + "RerankResponseExample1": { + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "summary": "Rerank task", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.rerank" @@ -151510,6 +151538,13 @@ ] }, "description": "Perform sparse embedding inference on the service", + "examples": { + "SparseEmbeddingRequestExample1": { + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example 
sentence.", + "summary": "Sparse embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151563,6 +151598,13 @@ } } }, + "examples": { + "SparseEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "summary": "Sparse embedding task", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.sparse_embedding" @@ -151721,6 +151763,13 @@ ] }, "description": "Perform text embedding inference on the service", + "examples": { + "TextEmbeddingRequestExample1": { + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "summary": "Text embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151774,6 +151823,13 @@ } } }, + "examples": { + "TextEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "summary": "Text embedding task", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.text_embedding" diff --git a/specification/inference/inference/examples/request/InferenceRequestExample1.yaml b/specification/inference/completion/examples/request/CompletionRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample1.yaml rename to specification/inference/completion/examples/request/CompletionRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample1.yaml b/specification/inference/completion/examples/response/CompletionResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample1.yaml rename to specification/inference/completion/examples/response/CompletionResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample2.yaml b/specification/inference/rerank/examples/request/RerankRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample2.yaml rename to specification/inference/rerank/examples/request/RerankRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample2.yaml b/specification/inference/rerank/examples/response/RerankResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample2.yaml rename to 
specification/inference/rerank/examples/response/RerankResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample3.yaml b/specification/inference/sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample3.yaml rename to specification/inference/sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample3.yaml b/specification/inference/sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample3.yaml rename to specification/inference/sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample4.yaml b/specification/inference/text_embedding/examples/request/TextEmbeddingRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample4.yaml rename to specification/inference/text_embedding/examples/request/TextEmbeddingRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample4.yaml b/specification/inference/text_embedding/examples/response/TextEmbeddingResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample4.yaml rename to specification/inference/text_embedding/examples/response/TextEmbeddingResponseExample1.yaml
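
For reference, the renamed example files carry the same summary, description, and value fields that surface in the generated OpenAPI and schema output above. Below is a minimal sketch of what specification/inference/completion/examples/request/CompletionRequestExample1.yaml plausibly contains, reconstructed only from the generated CompletionRequestExample1 entry in this diff; the actual file in the repository may include additional fields that are not visible here.

summary: Completion task
description: Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.
value: |-
  {
    "input": "What is Elastic?"
  }

The other renamed request/response files follow the same pattern, with their contents matching the corresponding example values shown in the JSON hunks above.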