@@ -165,9 +165,6 @@ type singleLevelIterator[I any, PI indexBlockIterator[I], D any, PD dataBlockIte
 	useFilterBlock         bool
 	lastBloomFilterMatched bool

-	// Lazy loading flag
-	indexLoaded bool
-
 	transforms IterTransforms

 	// All fields above this field are cleared when resetting the iterator for reuse.
@@ -217,9 +214,14 @@ func newColumnBlockSingleLevelIterator(
 		i.vbRH = r.blockReader.UsePreallocatedReadHandle(objstorage.NoReadBefore, &i.vbRHPrealloc)
 	}
 	i.data.InitOnce(r.keySchema, r.Comparer, &i.internalValueConstructor)
-
-	// Use lazy loading by default - index will be loaded on first access
-	i.indexLoaded = false
+	indexH, err := r.readTopLevelIndexBlock(ctx, i.readEnv.Block, i.indexFilterRH)
+	if err == nil {
+		err = i.index.InitHandle(r.Comparer, indexH, opts.Transforms)
+	}
+	if err != nil {
+		_ = i.Close()
+		return nil, err
+	}
 	return i, nil
 }
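Both constructors now share the eager-initialization shape shown above: read the top-level index block, hand it to the index iterator, and on any failure close the partially built iterator before returning the error. A minimal standalone sketch of that shape, using hypothetical names (newIterator, loadIndex, attachIndex) in place of the real reader and iterator methods:

package main

import "fmt"

type iterator struct {
	indexAttached bool
}

// Close is safe to call on a partially constructed iterator: it only
// releases whatever was actually acquired.
func (it *iterator) Close() error {
	it.indexAttached = false
	return nil
}

// loadIndex and attachIndex stand in for readTopLevelIndexBlock and
// InitHandle in this sketch; either step may fail.
func loadIndex() ([]byte, error) { return []byte("index"), nil }

func attachIndex(it *iterator, handle []byte) error {
	it.indexAttached = true
	return nil
}

// newIterator mirrors the restored constructor shape: load eagerly, funnel
// both failure paths into a single check, and close before returning the
// error so the caller never receives a half-initialized iterator.
func newIterator() (*iterator, error) {
	it := &iterator{}
	handle, err := loadIndex()
	if err == nil {
		err = attachIndex(it, handle)
	}
	if err != nil {
		_ = it.Close() // the construction error, not the close error, is reported
		return nil, err
	}
	return it, nil
}

func main() {
	it, err := newIterator()
	fmt.Println(it.indexAttached, err) // true <nil>
}

The discarded result of i.Close() in the diff follows the same reasoning: the construction error is the one the caller needs to see, so the close error is intentionally dropped.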
@@ -252,8 +254,14 @@ func newRowBlockSingleLevelIterator(
 		i.data.SetHasValuePrefix(true)
 	}

-	// Use lazy loading by default - index will be loaded on first access
-	i.indexLoaded = false
+	indexH, err := r.readTopLevelIndexBlock(ctx, i.readEnv.Block, i.indexFilterRH)
+	if err == nil {
+		err = i.index.InitHandle(r.Comparer, indexH, opts.Transforms)
+	}
+	if err != nil {
+		_ = i.Close()
+		return nil, err
+	}
 	return i, nil
 }
@@ -441,7 +449,7 @@ func (i *singleLevelIterator[I, PI, P, PD]) SetContext(ctx context.Context) {
 // unpositioned. If unsuccessful, it sets i.err to any error encountered, which
 // may be nil if we have simply exhausted the entire table.
 func (i *singleLevelIterator[I, PI, P, PD]) loadDataBlock(dir int8) loadBlockResult {
-	if i.err != nil || !PI(&i.index).Valid() {
+	if !PI(&i.index).Valid() {
 		// Ensure the data block iterator is invalidated even if loading of the block
 		// fails.
 		PD(&i.data).Invalidate()
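Throughout the iterator, the index and data block iterators are stored by value and driven through the PI(&i.index) and PD(&i.data) conversions rather than through interface values. For readers unfamiliar with that generics idiom, here is a minimal, self-contained sketch; the toy iterConstraint, container, and rowIter types are illustrations only, not Pebble's actual indexBlockIterator constraint:

package main

import "fmt"

// iterConstraint is a toy stand-in for a pointer-constrained iterator
// constraint: PI must be exactly *I and provide Valid and Next.
type iterConstraint[I any] interface {
	*I
	Valid() bool
	Next() bool
}

// container embeds the iterator by value (field of type I) and converts
// &c.index to PI whenever it needs to call a method on it.
type container[I any, PI iterConstraint[I]] struct {
	index I
}

func (c *container[I, PI]) step() bool {
	return PI(&c.index).Valid() && PI(&c.index).Next()
}

// rowIter is a toy iterator over n rows.
type rowIter struct {
	pos, n int
}

func (r *rowIter) Valid() bool { return r.pos < r.n }

func (r *rowIter) Next() bool {
	r.pos++
	return r.pos < r.n
}

func main() {
	c := &container[rowIter, *rowIter]{index: rowIter{n: 2}}
	fmt.Println(c.step()) // true: advances from row 0 to row 1
	fmt.Println(c.step()) // false: advancing past the last row
}

The point of the pattern is to keep the block iterators embedded by value in the parent struct while still calling their pointer-receiver methods, so no separate interface value has to be allocated for them.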
@@ -513,12 +521,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) ReadValueBlock(
 // apprioriate bound, depending on the iteration direction, and returns either
 // `blockIntersects` or `blockExcluded`.
 func (i *singleLevelIterator[I, PI, D, PD]) resolveMaybeExcluded(dir int8) intersectsResult {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return blockExcluded
-		}
-	}
 	// TODO(jackson): We could first try comparing to top-level index block's
 	// key, and if within bounds avoid per-data block key comparisons.
@@ -680,13 +682,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) SeekGE(
 func (i *singleLevelIterator[I, PI, D, PD]) seekGEHelper(
 	key []byte, boundsCmp int, flags base.SeekGEFlags,
 ) *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
-
 	// Invariant: trySeekUsingNext => !i.data.isDataInvalidated() && i.exhaustedBounds != +1

 	// SeekGE performs various step-instead-of-seeking optimizations: eg enabled
@@ -823,7 +818,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) seekPrefixGE(
 		flags = flags.DisableTrySeekUsingNext()
 	}
 	i.lastBloomFilterMatched = false
-
 	// Check prefix bloom filter.
 	var mayContain bool
 	mayContain, i.err = i.bloomFilterMayContain(prefix)
@@ -928,13 +922,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) virtualLastSeekLE() *base.InternalKV
 	i.boundsCmp = 0
 	i.positionedUsingLatestBounds = true

-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
-
 	indexOk := PI(&i.index).SeekGE(key)
 	// We can have multiple internal keys with the same user key as the seek
 	// key. In that case, we want the last (greatest) internal key.
@@ -1006,12 +993,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) virtualLastSeekLE() *base.InternalKV
 func (i *singleLevelIterator[I, PI, D, PD]) SeekLT(
 	key []byte, flags base.SeekLTFlags,
 ) *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
 	if i.readEnv.Virtual != nil {
 		// Might have to fix upper bound since virtual sstable bounds are not
 		// known to callers of SeekLT.
@@ -1135,12 +1116,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) First() *base.InternalKV {
 // index file. For the latter, one cannot make any claims about absolute
 // positioning.
 func (i *singleLevelIterator[I, PI, D, PD]) firstInternal() *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
 	i.exhaustedBounds = 0
 	i.err = nil // clear cached iteration error
 	// Seek optimization only applies until iterator is first positioned after SetBounds.
@@ -1205,12 +1180,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) Last() *base.InternalKV {
 // index file. For the latter, one cannot make any claims about absolute
 // positioning.
 func (i *singleLevelIterator[I, PI, D, PD]) lastInternal() *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
 	i.exhaustedBounds = 0
 	i.err = nil // clear cached iteration error
 	// Seek optimization only applies until iterator is first positioned after SetBounds.
@@ -1280,12 +1249,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) Next() *base.InternalKV {

 // NextPrefix implements (base.InternalIterator).NextPrefix.
 func (i *singleLevelIterator[I, PI, D, PD]) NextPrefix(succKey []byte) *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
 	if i.exhaustedBounds == +1 {
 		panic("NextPrefix called even though exhausted upper bound")
 	}
@@ -1380,12 +1343,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) Prev() *base.InternalKV {
 }

 func (i *singleLevelIterator[I, PI, D, PD]) skipForward() *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
 	for {
 		if !PI(&i.index).Next() {
 			PD(&i.data).Invalidate()
@@ -1464,12 +1421,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) skipForward() *base.InternalKV {
 }

 func (i *singleLevelIterator[I, PI, D, PD]) skipBackward() *base.InternalKV {
-	if !i.indexLoaded {
-		if err := i.ensureIndexLoaded(); err != nil {
-			i.err = err
-			return nil
-		}
-	}
 	for {
 		if !PI(&i.index).Prev() {
 			PD(&i.data).Invalidate()
@@ -1563,8 +1514,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) closeInternal() error {
 	}
 	var err error
 	err = firstError(err, PD(&i.data).Close())
-	// Always close index iterator unconditionally to avoid BufferPool panic
-	// Even if lazy loading wasn't used, the index might have been initialized
 	err = firstError(err, PI(&i.index).Close())
 	if i.indexFilterRH != nil {
 		err = firstError(err, i.indexFilterRH.Close())
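closeInternal keeps closing every remaining resource even after one Close fails, chaining the results through firstError so that only the earliest failure is reported. Assuming the helper has the usual keep-the-first-non-nil-error semantics (a sketch of the idea, not the package's actual definition):

// firstError returns err0 if it is non-nil and err1 otherwise. Chaining
// err = firstError(err, handle.Close()) closes every handle while
// preserving the first error encountered.
func firstError(err0, err1 error) error {
	if err0 != nil {
		return err0
	}
	return err1
}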
@@ -1583,7 +1532,6 @@ func (i *singleLevelIterator[I, PI, D, PD]) closeInternal() error {
 		err = firstError(err, i.vbRH.Close())
 		i.vbRH = nil
 	}
-	i.indexLoaded = false
 	return err
 }
@@ -1598,16 +1546,3 @@ func (i *singleLevelIterator[I, PI, D, PD]) String() string {
 func (i *singleLevelIterator[I, PI, D, PD]) DebugTree(tp treeprinter.Node) {
 	tp.Childf("%T(%p) fileNum=%s", i, i, i.String())
 }
-
-func (i *singleLevelIterator[I, PI, D, PD]) ensureIndexLoaded() error {
-	indexH, err := i.reader.readTopLevelIndexBlock(i.ctx, i.readEnv.Block, i.indexFilterRH)
-	if err == nil {
-		err = PI(&i.index).InitHandle(i.reader.Comparer, indexH, i.transforms)
-	}
-	if err != nil {
-		return err
-	}
-
-	i.indexLoaded = true
-	return nil
-}