-
Notifications
You must be signed in to change notification settings - Fork 4.5k
/
KeywordTokenizer.cs
118 lines (101 loc) · 3.95 KB
/
KeywordTokenizer.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System.Text.Json;
using Azure.Core;
namespace Azure.Search.Documents.Indexes.Models
{
[CodeGenModel("KeywordTokenizerV2")]
[CodeGenSuppress(nameof(KeywordTokenizer), typeof(string), typeof(string), typeof(int?))]
public partial class KeywordTokenizer : IUtf8JsonSerializable
{
    /// <summary>
    /// Initializes a new instance of KeywordTokenizer.
    /// </summary>
    /// <param name="name">
    /// The name of the tokenizer. It must only contain letters, digits, spaces,
    /// dashes or underscores, can only start and end with alphanumeric characters,
    /// and is limited to 128 characters.
    /// </param>
    public KeywordTokenizer(string name) : base(name)
    {
        Argument.AssertNotNull(name, nameof(name));

        // OData discriminator sent to the service for this tokenizer type.
        ODataType = "#Microsoft.Azure.Search.KeywordTokenizerV2";
    }

    /// <summary>
    /// The read buffer size in bytes. Default is 256.
    /// Setting this property on new instances of <see cref="KeywordTokenizer"/> may result in an error
    /// when sending new requests to the Azure Cognitive Search service.
    /// </summary>
    public int? BufferSize { get; set; }

    /// <summary>
    /// The maximum token length. Default is 256.
    /// Tokens longer than the maximum length are split.
    /// The maximum token length that can be used is 300 characters.
    /// </summary>
    public int? MaxTokenLength { get; set; }

    // Fully qualified with global:: to work around a bug in the code generator.
    void global::Azure.Core.IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
    {
        writer.WriteStartObject();

        // The discriminator is emitted first, then name, then the optional
        // integer settings — the same order the service has always received.
        writer.WriteString("@odata.type", ODataType);
        writer.WriteString("name", Name);

        if (BufferSize.HasValue)
        {
            writer.WriteNumber("bufferSize", BufferSize.Value);
        }

        if (MaxTokenLength.HasValue)
        {
            writer.WriteNumber("maxTokenLength", MaxTokenLength.Value);
        }

        writer.WriteEndObject();
    }

    internal static KeywordTokenizer DeserializeKeywordTokenizer(JsonElement element)
    {
        string odataType = default;
        string name = default;
        int? bufferSize = default;
        int? maxTokenLength = default;

        foreach (JsonProperty jsonProperty in element.EnumerateObject())
        {
            if (jsonProperty.NameEquals("@odata.type"))
            {
                odataType = jsonProperty.Value.GetString();
            }
            else if (jsonProperty.NameEquals("name"))
            {
                name = jsonProperty.Value.GetString();
            }
            else if (jsonProperty.NameEquals("bufferSize"))
            {
                // An explicit JSON null leaves the nullable property unset.
                if (jsonProperty.Value.ValueKind != JsonValueKind.Null)
                {
                    bufferSize = jsonProperty.Value.GetInt32();
                }
            }
            else if (jsonProperty.NameEquals("maxTokenLength"))
            {
                if (jsonProperty.Value.ValueKind != JsonValueKind.Null)
                {
                    maxTokenLength = jsonProperty.Value.GetInt32();
                }
            }
        }

        return new KeywordTokenizer(name)
        {
            ODataType = odataType,
            BufferSize = bufferSize,
            MaxTokenLength = maxTokenLength,
        };
    }
}
}