diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json
index 23fb351964..0353b2f56e 100644
--- a/output/openapi/elasticsearch-openapi.json
+++ b/output/openapi/elasticsearch-openapi.json
@@ -72149,17 +72149,11 @@
                 "type": "number"
               },
               "token_chars": {
-                "oneOf": [
-                  {
-                    "type": "string"
-                  },
-                  {
-                    "type": "array",
-                    "items": {
-                      "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                    }
-                  }
-                ]
+                "default": [],
+                "type": "array",
+                "items": {
+                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
+                }
               }
             },
             "required": [
diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json
index b7c586f419..b2114eaa3a 100644
--- a/output/openapi/elasticsearch-serverless-openapi.json
+++ b/output/openapi/elasticsearch-serverless-openapi.json
@@ -46614,17 +46614,11 @@
                 "type": "number"
               },
               "token_chars": {
-                "oneOf": [
-                  {
-                    "type": "string"
-                  },
-                  {
-                    "type": "array",
-                    "items": {
-                      "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                    }
-                  }
-                ]
+                "default": [],
+                "type": "array",
+                "items": {
+                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
+                }
               }
             },
             "required": [
diff --git a/output/schema/schema.json b/output/schema/schema.json
index c379438bc7..8bd88af402 100644
--- a/output/schema/schema.json
+++ b/output/schema/schema.json
@@ -73515,33 +73515,23 @@
           }
         },
         {
+          "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
           "name": "token_chars",
           "required": false,
+          "serverDefault": [],
           "type": {
-            "kind": "union_of",
-            "items": [
-              {
-                "kind": "instance_of",
-                "type": {
-                  "name": "string",
-                  "namespace": "_builtins"
-                }
-              },
-              {
-                "kind": "array_of",
-                "value": {
-                  "kind": "instance_of",
-                  "type": {
-                    "name": "TokenChar",
-                    "namespace": "_types.analysis"
-                  }
-                }
+            "kind": "array_of",
+            "value": {
+              "kind": "instance_of",
+              "type": {
+                "name": "TokenChar",
+                "namespace": "_types.analysis"
              }
-            ]
+            }
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L48-L54"
+      "specLocation": "_types/analysis/tokenizers.ts#L48-L58"
    },
    {
      "kind": "interface",
@@ -75576,7 +75566,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L65-L71"
+      "specLocation": "_types/analysis/tokenizers.ts#L69-L75"
    },
    {
      "kind": "interface",
@@ -76019,7 +76009,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L73-L75"
+      "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
    },
    {
      "kind": "interface",
@@ -76227,7 +76217,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
+      "specLocation": "_types/analysis/tokenizers.ts#L81-L83"
    },
    {
      "kind": "interface",
@@ -76534,6 +76524,7 @@
          }
        },
        {
+          "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
          "name": "token_chars",
          "required": false,
          "serverDefault": [],
@@ -76549,7 +76540,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L81-L90"
+      "specLocation": "_types/analysis/tokenizers.ts#L85-L95"
    },
    {
      "kind": "interface",
@@ -76937,7 +76928,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L92-L99"
+      "specLocation": "_types/analysis/tokenizers.ts#L97-L104"
    },
    {
      "kind": "interface",
@@ -77255,7 +77246,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L101-L106"
+      "specLocation": "_types/analysis/tokenizers.ts#L106-L111"
    },
    {
      "kind": "interface",
@@ -78173,7 +78164,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
+      "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
    },
    {
      "kind": "interface",
@@ -78208,7 +78199,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L108-L111"
+      "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
    },
    {
      "kind": "interface",
@@ -78593,7 +78584,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
+      "specLocation": "_types/analysis/tokenizers.ts#L123-L126"
    },
    {
      "kind": "interface",
@@ -79258,7 +79249,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L123-L125"
+      "specLocation": "_types/analysis/tokenizers.ts#L128-L130"
    },
    {
      "kind": "enum",
@@ -79286,7 +79277,7 @@
        "name": "TokenChar",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L56-L63"
+      "specLocation": "_types/analysis/tokenizers.ts#L60-L67"
    },
    {
      "kind": "type_alias",
@@ -79890,7 +79881,7 @@
        "name": "Tokenizer",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L137-L142",
+      "specLocation": "_types/analysis/tokenizers.ts#L142-L147",
      "type": {
        "kind": "union_of",
        "items": [
@@ -79938,7 +79929,7 @@
        "name": "TokenizerDefinition",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L144-L167",
+      "specLocation": "_types/analysis/tokenizers.ts#L149-L172",
      "type": {
        "kind": "union_of",
        "items": [
@@ -80223,7 +80214,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L127-L130"
+      "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
    },
    {
      "kind": "interface",
@@ -80347,7 +80338,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
+      "specLocation": "_types/analysis/tokenizers.ts#L137-L140"
    },
    {
      "kind": "interface",
diff --git a/output/typescript/types.ts b/output/typescript/types.ts
index b99d9eb857..af08c6a6df 100644
--- a/output/typescript/types.ts
+++ b/output/typescript/types.ts
@@ -4741,7 +4741,7 @@ export interface AnalysisEdgeNGramTokenizer extends AnalysisTokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | AnalysisTokenChar[]
+  token_chars?: AnalysisTokenChar[]
 }
 
 export interface AnalysisElisionTokenFilter extends AnalysisTokenFilterBase {
diff --git a/specification/_types/analysis/tokenizers.ts b/specification/_types/analysis/tokenizers.ts
index 649b555eea..591e5c357f 100644
--- a/specification/_types/analysis/tokenizers.ts
+++ b/specification/_types/analysis/tokenizers.ts
@@ -50,7 +50,11 @@ export class EdgeNGramTokenizer extends TokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | TokenChar[]
+  /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
+   * @server_default []
+   */
+  token_chars?: TokenChar[]
 }
 
 export enum TokenChar {
@@ -84,6 +88,7 @@ export class NGramTokenizer extends TokenizerBase {
   max_gram?: integer
   min_gram?: integer
   /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
    * @server_default []
    */
   token_chars?: TokenChar[]