-
Notifications
You must be signed in to change notification settings - Fork 623
/
ParallelCompositeReader.cs
263 lines (249 loc) · 12 KB
/
ParallelCompositeReader.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
using J2N.Runtime.CompilerServices;
using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using Lucene.Net.Support.Threading;
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.ExceptionServices;
using JCG = J2N.Collections.Generic;
namespace Lucene.Net.Index
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A <see cref="CompositeReader"/> which reads multiple, parallel indexes. Each index added
/// must have the same number of documents, and exactly the same hierarchical subreader structure,
/// but typically each contains different fields. Deletions are taken from the first reader.
/// Each document contains the union of the fields of all
/// documents with the same document number. When searching, matches for a
/// query term are from the first index added that has the field.
///
/// <para/>This is useful, e.g., with collections that have large fields which
/// change rarely and small fields that change more frequently. The smaller
/// fields may be re-indexed in a new index and both indexes may be searched
/// together.
///
/// <para/><strong>Warning:</strong> It is up to you to make sure all indexes
/// are created and modified the same way. For example, if you add
/// documents to one index, you need to add the same documents in the
/// same order to the other indexes. <em>Failure to do so will result in
/// undefined behavior</em>.
/// A good strategy to create suitable indexes with <see cref="IndexWriter"/> is to use
/// <see cref="LogDocMergePolicy"/>, as this one does not reorder documents
/// during merging (like <see cref="TieredMergePolicy"/>) and triggers merges
/// by number of documents per segment. If you use different <see cref="MergePolicy"/>s
/// it might happen that the segment structure of your index is no longer predictable.
/// </summary>
public class ParallelCompositeReader : BaseCompositeReader<IndexReader>
{
    // When true, DoClose() disposes the underlying readers; when false it only DecRef()s them.
    private readonly bool closeSubReaders;

    // Identity-based set of every reader DoClose() is responsible for releasing:
    // the user-supplied readers and storedFieldReaders, plus the synthetic
    // parallel sub-readers created in PrepareSubReaders().
    private readonly ISet<IndexReader> completeReaderSet = new JCG.HashSet<IndexReader>(IdentityEqualityComparer<IndexReader>.Default);

    /// <summary>
    /// Create a <see cref="ParallelCompositeReader"/> based on the provided
    /// readers; auto-disposes the given <paramref name="readers"/> on <see cref="IndexReader.Dispose()"/>.
    /// </summary>
    public ParallelCompositeReader(params CompositeReader[] readers)
        : this(true, readers)
    {
    }

    /// <summary>
    /// Create a <see cref="ParallelCompositeReader"/> based on the provided
    /// <paramref name="readers"/>.
    /// </summary>
    /// <param name="closeSubReaders">If <c>true</c>, disposing this reader also disposes the given sub-readers.</param>
    /// <param name="readers">The parallel composite readers; same-numbered documents are merged across them.</param>
    public ParallelCompositeReader(bool closeSubReaders, params CompositeReader[] readers)
        : this(closeSubReaders, readers, readers)
    {
    }

    /// <summary>
    /// Expert: create a <see cref="ParallelCompositeReader"/> based on the provided
    /// <paramref name="readers"/> and <paramref name="storedFieldReaders"/>; when a document is
    /// loaded, only <paramref name="storedFieldReaders"/> will be used.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown by <c>PrepareSubReaders</c>/<c>Validate</c> when the readers'
    /// MaxDoc values or sub-reader structures are incompatible.</exception>
    public ParallelCompositeReader(bool closeSubReaders, CompositeReader[] readers, CompositeReader[] storedFieldReaders)
        : base(PrepareSubReaders(readers, storedFieldReaders))
    {
        this.closeSubReaders = closeSubReaders;
        // Note: readers and storedFieldReaders may overlap (the two-arg ctor passes the
        // same array twice); the identity set de-duplicates them.
        completeReaderSet.UnionWith(readers);
        completeReaderSet.UnionWith(storedFieldReaders);
        // update ref-counts (like MultiReader):
        if (!closeSubReaders)
        {
            // DoClose() will DecRef() every reader in the set, so take an extra
            // reference now to keep the caller's readers open after we are disposed.
            foreach (IndexReader reader in completeReaderSet)
            {
                reader.IncRef();
            }
        }
        // finally add our own synthetic readers, so we close or decRef them, too (it does not matter what we do)
        completeReaderSet.UnionWith(GetSequentialSubReaders());
    }

    // Builds the synthetic sub-reader array passed to the base constructor: for each
    // sub-reader slot of the first reader, wraps the corresponding sub-readers of ALL
    // readers into a Parallel(Atomic|Composite)Reader mirroring the first reader's hierarchy.
    private static IndexReader[] PrepareSubReaders(CompositeReader[] readers, CompositeReader[] storedFieldsReaders)
    {
        if (readers.Length == 0)
        {
            // Stored-field readers make no sense without at least one main reader.
            if (storedFieldsReaders.Length > 0)
            {
                throw new ArgumentException("There must be at least one main reader if storedFieldsReaders are used.");
            }
            // LUCENENET: Optimized empty IndexReader array creation
            return Arrays.Empty<IndexReader>();
        }
        else
        {
            IList<IndexReader> firstSubReaders = readers[0].GetSequentialSubReaders();
            // check compatibility: record the first reader's shape (per-slot MaxDoc and
            // atomic-vs-composite kind) and require every other reader to match it.
            int maxDoc = readers[0].MaxDoc, noSubs = firstSubReaders.Count;
            int[] childMaxDoc = new int[noSubs];
            bool[] childAtomic = new bool[noSubs];
            for (int i = 0; i < noSubs; i++)
            {
                IndexReader r = firstSubReaders[i];
                childMaxDoc[i] = r.MaxDoc;
                childAtomic[i] = r is AtomicReader;
            }
            Validate(readers, maxDoc, childMaxDoc, childAtomic);
            Validate(storedFieldsReaders, maxDoc, childMaxDoc, childAtomic);
            // hierarchically build the same subreader structure as the first CompositeReader with Parallel*Readers:
            IndexReader[] subReaders = new IndexReader[noSubs];
            for (int i = 0; i < subReaders.Length; i++)
            {
                if (firstSubReaders[i] is AtomicReader)
                {
                    // Gather the i-th atomic sub-reader of every main reader...
                    AtomicReader[] atomicSubs = new AtomicReader[readers.Length];
                    for (int j = 0; j < readers.Length; j++)
                    {
                        atomicSubs[j] = (AtomicReader)readers[j].GetSequentialSubReaders()[i];
                    }
                    // ...and of every stored-fields reader.
                    AtomicReader[] storedSubs = new AtomicReader[storedFieldsReaders.Length];
                    for (int j = 0; j < storedFieldsReaders.Length; j++)
                    {
                        storedSubs[j] = (AtomicReader)storedFieldsReaders[j].GetSequentialSubReaders()[i];
                    }
                    // We pass true for closeSubs and we prevent closing of subreaders in doClose():
                    // By this the synthetic throw-away readers used here are completely invisible to ref-counting
                    subReaders[i] = new ParallelAtomicReaderAnonymousClass(atomicSubs, storedSubs);
                }
                else
                {
                    // Validate() guarantees the kinds line up, so a non-atomic slot must be composite.
                    if (Debugging.AssertsEnabled) Debugging.Assert(firstSubReaders[i] is CompositeReader);
                    CompositeReader[] compositeSubs = new CompositeReader[readers.Length];
                    for (int j = 0; j < readers.Length; j++)
                    {
                        compositeSubs[j] = (CompositeReader)readers[j].GetSequentialSubReaders()[i];
                    }
                    CompositeReader[] storedSubs = new CompositeReader[storedFieldsReaders.Length];
                    for (int j = 0; j < storedFieldsReaders.Length; j++)
                    {
                        storedSubs[j] = (CompositeReader)storedFieldsReaders[j].GetSequentialSubReaders()[i];
                    }
                    // We pass true for closeSubs and we prevent closing of subreaders in doClose():
                    // By this the synthetic throw-away readers used here are completely invisible to ref-counting
                    subReaders[i] = new ParallelCompositeReaderAnonymousClass(compositeSubs, storedSubs);
                }
            }
            return subReaders;
        }
    }

    // LUCENENET-specific named class standing in for Java's anonymous subclass:
    // a synthetic atomic wrapper whose DoClose() is a no-op, so the wrapped
    // sub-readers are released only through the outer reader's completeReaderSet.
    private class ParallelAtomicReaderAnonymousClass : ParallelAtomicReader
    {
        public ParallelAtomicReaderAnonymousClass(Lucene.Net.Index.AtomicReader[] atomicSubs, Lucene.Net.Index.AtomicReader[] storedSubs)
            : base(true, atomicSubs, storedSubs)
        {
        }

        // Intentionally empty: keeps this throw-away wrapper invisible to ref-counting
        // (see the comment at its creation site in PrepareSubReaders).
        protected internal override void DoClose()
        {
        }
    }

    // LUCENENET-specific named class standing in for Java's anonymous subclass:
    // the composite counterpart of ParallelAtomicReaderAnonymousClass above.
    private class ParallelCompositeReaderAnonymousClass : ParallelCompositeReader
    {
        public ParallelCompositeReaderAnonymousClass(Lucene.Net.Index.CompositeReader[] compositeSubs, Lucene.Net.Index.CompositeReader[] storedSubs)
            : base(true, compositeSubs, storedSubs)
        {
        }

        // Intentionally empty: keeps this throw-away wrapper invisible to ref-counting
        // (see the comment at its creation site in PrepareSubReaders).
        protected internal override void DoClose()
        {
        }
    }

    // Throws ArgumentException unless every reader matches the expected shape:
    // same overall MaxDoc, same number of sub-readers, and per-slot sub-readers
    // with the same MaxDoc and the same kind (atomic vs. composite).
    private static void Validate(CompositeReader[] readers, int maxDoc, int[] childMaxDoc, bool[] childAtomic)
    {
        for (int i = 0; i < readers.Length; i++)
        {
            CompositeReader reader = readers[i];
            IList<IndexReader> subs = reader.GetSequentialSubReaders();
            if (reader.MaxDoc != maxDoc)
            {
                throw new ArgumentException("All readers must have same MaxDoc: " + maxDoc + "!=" + reader.MaxDoc);
            }
            int noSubs = subs.Count;
            if (noSubs != childMaxDoc.Length)
            {
                throw new ArgumentException("All readers must have same number of subReaders");
            }
            for (int subIDX = 0; subIDX < noSubs; subIDX++)
            {
                IndexReader r = subs[subIDX];
                if (r.MaxDoc != childMaxDoc[subIDX])
                {
                    throw new ArgumentException("All readers must have same corresponding subReader maxDoc");
                }
                if (!(childAtomic[subIDX] ? (r is AtomicReader) : (r is CompositeReader)))
                {
                    throw new ArgumentException("All readers must have same corresponding subReader types (atomic or composite)");
                }
            }
        }
    }

    // Releases every tracked reader (Dispose or DecRef, mirroring the constructor's
    // IncRef decision). Attempts ALL readers even if some fail, then rethrows the
    // first IO exception encountered.
    protected internal override void DoClose()
    {
        UninterruptableMonitor.Enter(this);
        try
        {
            Exception ioe = null; // LUCENENET: No need to cast to IOException
            foreach (IndexReader reader in completeReaderSet)
            {
                try
                {
                    if (closeSubReaders)
                    {
                        reader.Dispose();
                    }
                    else
                    {
                        reader.DecRef();
                    }
                }
                catch (Exception e) when (e.IsIOException())
                {
                    // Remember only the first IO failure; keep releasing the rest.
                    if (ioe is null)
                    {
                        ioe = e;
                    }
                }
            }
            // throw the first exception
            if (ioe != null)
            {
                ExceptionDispatchInfo.Capture(ioe).Throw(); // LUCENENET: Rethrow to preserve stack details from the original throw
            }
        }
        finally
        {
            UninterruptableMonitor.Exit(this);
        }
    }
}
}