From f9a0b4712e46870bd41ba1f9ffc5cc2f0c4bc088 Mon Sep 17 00:00:00 2001
From: Quentin Pradet
Date: Thu, 24 Jul 2025 15:01:28 +0400
Subject: [PATCH] Remove string variant for token_chars field

It does not bring value.
---
 output/openapi/elasticsearch-openapi.json    | 16 ++---
 .../elasticsearch-serverless-openapi.json    | 16 ++---
 output/schema/schema.json                    | 61 ++++++++-----------
 output/typescript/types.ts                   |  2 +-
 specification/_types/analysis/tokenizers.ts  |  7 ++-
 5 files changed, 43 insertions(+), 59 deletions(-)

diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json
index 8370b5872e..8a32630c45 100644
--- a/output/openapi/elasticsearch-openapi.json
+++ b/output/openapi/elasticsearch-openapi.json
@@ -74129,17 +74129,11 @@
             "type": "number"
           },
           "token_chars": {
-            "oneOf": [
-              {
-                "type": "string"
-              },
-              {
-                "type": "array",
-                "items": {
-                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                }
-              }
-            ]
+            "default": [],
+            "type": "array",
+            "items": {
+              "$ref": "#/components/schemas/_types.analysis.TokenChar"
+            }
           }
         },
         "required": [
diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json
index 45d2941614..660423dbf6 100644
--- a/output/openapi/elasticsearch-serverless-openapi.json
+++ b/output/openapi/elasticsearch-serverless-openapi.json
@@ -48314,17 +48314,11 @@
             "type": "number"
           },
           "token_chars": {
-            "oneOf": [
-              {
-                "type": "string"
-              },
-              {
-                "type": "array",
-                "items": {
-                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                }
-              }
-            ]
+            "default": [],
+            "type": "array",
+            "items": {
+              "$ref": "#/components/schemas/_types.analysis.TokenChar"
+            }
          }
        },
        "required": [
diff --git a/output/schema/schema.json b/output/schema/schema.json
index 3ed84095f2..bf2bad15f2 100644
--- a/output/schema/schema.json
+++ b/output/schema/schema.json
@@ -74723,33 +74723,23 @@
             }
           },
           {
+            "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
             "name": "token_chars",
             "required": false,
+            "serverDefault": [],
             "type": {
-              "kind": "union_of",
-              "items": [
-                {
-                  "kind": "instance_of",
-                  "type": {
-                    "name": "string",
-                    "namespace": "_builtins"
-                  }
-                },
-                {
-                  "kind": "array_of",
-                  "value": {
-                    "kind": "instance_of",
-                    "type": {
-                      "name": "TokenChar",
-                      "namespace": "_types.analysis"
-                    }
-                  }
+              "kind": "array_of",
+              "value": {
+                "kind": "instance_of",
+                "type": {
+                  "name": "TokenChar",
+                  "namespace": "_types.analysis"
                 }
-              ]
+              }
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L48-L54"
+        "specLocation": "_types/analysis/tokenizers.ts#L48-L58"
       },
       {
         "kind": "interface",
@@ -76787,7 +76777,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L65-L71"
+        "specLocation": "_types/analysis/tokenizers.ts#L69-L75"
       },
       {
         "kind": "interface",
@@ -77230,7 +77220,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L73-L75"
+        "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
       },
       {
         "kind": "interface",
@@ -77438,7 +77428,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
+        "specLocation": "_types/analysis/tokenizers.ts#L81-L83"
       },
       {
         "kind": "interface",
@@ -77745,22 +77735,23 @@
             }
           },
           {
+            "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
             "name": "token_chars",
             "required": false,
             "serverDefault": [],
             "type": {
               "kind": "array_of",
               "value": {
                 "kind": "instance_of",
                 "type": {
                   "name": "TokenChar",
                   "namespace": "_types.analysis"
                 }
               }
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L81-L90"
+        "specLocation": "_types/analysis/tokenizers.ts#L85-L95"
       },
       {
         "kind": "interface",
@@ -78152,7 +78143,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L92-L99"
+        "specLocation": "_types/analysis/tokenizers.ts#L97-L104"
       },
       {
         "kind": "interface",
@@ -78493,7 +78484,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L101-L106"
+        "specLocation": "_types/analysis/tokenizers.ts#L106-L111"
       },
       {
         "kind": "interface",
@@ -79415,7 +79406,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
+        "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
       },
       {
         "kind": "interface",
@@ -79450,7 +79441,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L108-L111"
+        "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
       },
       {
         "kind": "interface",
@@ -79855,7 +79846,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
+        "specLocation": "_types/analysis/tokenizers.ts#L123-L126"
       },
       {
         "kind": "interface",
@@ -80527,7 +80518,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L123-L125"
+        "specLocation": "_types/analysis/tokenizers.ts#L128-L130"
       },
       {
         "kind": "enum",
@@ -80555,7 +80546,7 @@
         "name": "TokenChar",
         "namespace": "_types.analysis"
       },
-      "specLocation": "_types/analysis/tokenizers.ts#L56-L63"
+      "specLocation": "_types/analysis/tokenizers.ts#L60-L67"
     },
     {
      "kind": "type_alias",
@@ -81159,7 +81150,7 @@
        "name": "Tokenizer",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L137-L142",
+      "specLocation": "_types/analysis/tokenizers.ts#L142-L147",
      "type": {
        "kind": "union_of",
        "items": [
@@ -81207,7 +81198,7 @@
        "name": "TokenizerDefinition",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L144-L167",
+      "specLocation": "_types/analysis/tokenizers.ts#L149-L172",
      "type": {
        "kind": "union_of",
        "items": [
@@ -81492,7 +81483,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L127-L130"
+        "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
       },
       {
         "kind": "interface",
@@ -81620,7 +81611,7 @@
             }
           }
         ],
-        "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
+        "specLocation": "_types/analysis/tokenizers.ts#L137-L140"
       },
       {
         "kind": "interface",
diff --git a/output/typescript/types.ts b/output/typescript/types.ts
index 51f7959779..507b9e7164 100644
--- a/output/typescript/types.ts
+++ b/output/typescript/types.ts
@@ -4746,7 +4746,7 @@ export interface AnalysisEdgeNGramTokenizer extends AnalysisTokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | AnalysisTokenChar[]
+  token_chars?: AnalysisTokenChar[]
 }
 
 export interface AnalysisElisionTokenFilter extends AnalysisTokenFilterBase {
diff --git a/specification/_types/analysis/tokenizers.ts b/specification/_types/analysis/tokenizers.ts
index 649b555eea..591e5c357f 100644
--- a/specification/_types/analysis/tokenizers.ts
+++ b/specification/_types/analysis/tokenizers.ts
@@ -50,7 +50,11 @@ export class EdgeNGramTokenizer extends TokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | TokenChar[]
+  /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
+   * @server_default []
+   */
+  token_chars?: TokenChar[]
 }
 
 export enum TokenChar {
@@ -84,6 +88,7 @@ export class NGramTokenizer extends TokenizerBase {
   max_gram?: integer
   min_gram?: integer
   /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
    * @server_default []
    */
   token_chars?: TokenChar[]