HBASE-16324 Remove LegacyScanQueryMatcher
Apache9 committed Aug 26, 2017
1 parent 2773510 commit 8d33949
Showing 21 changed files with 552 additions and 1,045 deletions.
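Most hunks below make the same substitution: call sites that used to build a throwaway Scan just to carry a max-versions setting now hand an OptionalInt straight to the StoreScanner constructor. A minimal sketch of the resulting call shape, with the class and method names invented for illustration and the constructor arguments taken from the call sites in this diff (StoreScanner is an internal regionserver class, so the signature may differ in other releases):

import java.io.IOException;
import java.util.List;
import java.util.OptionalInt;

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreScanner;

// Illustrative helper only; mirrors the post-patch compaction/flush call sites below.
final class StoreScannerMigrationSketch {
  static InternalScanner openRetainDeletesScanner(Store store, ScanInfo scanInfo,
      List<? extends KeyValueScanner> scanners) throws IOException {
    // OptionalInt.empty() leaves the version limit to the ScanInfo defaults, which is
    // what the removed scan.setMaxVersions(scanInfo.getMaxVersions()) calls expressed.
    return new StoreScanner(store, scanInfo, OptionalInt.empty(), scanners,
        ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
        HConstants.OLDEST_TIMESTAMP);
  }
}

Where every version must be kept (the in-memory compaction iterator further down), OptionalInt.of(Integer.MAX_VALUE) stands in for the old no-argument scan.setMaxVersions().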
@@ -19,9 +19,9 @@
 package org.apache.hadoop.hbase.coprocessor.example;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.List;
 import java.util.NavigableSet;
+import java.util.OptionalInt;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -194,9 +194,7 @@ public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocess
       // take default action
       return null;
     }
-    Scan scan = new Scan();
-    scan.setMaxVersions(scanInfo.getMaxVersions());
-    return new StoreScanner(store, scanInfo, scan, scanners,
+    return new StoreScanner(store, scanInfo, OptionalInt.empty(), scanners,
         ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
   }
 
@@ -210,9 +208,7 @@ public InternalScanner preCompactScannerOpen(
       // take default action
       return null;
     }
-    Scan scan = new Scan();
-    scan.setMaxVersions(scanInfo.getMaxVersions());
-    return new StoreScanner(store, scanInfo, scan, scanners, scanType,
+    return new StoreScanner(store, scanInfo, OptionalInt.empty(), scanners, scanType,
         store.getSmallestReadPoint(), earliestPutTs);
   }
 
@@ -22,6 +22,7 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
+import java.util.OptionalInt;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,7 +33,6 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.regionserver.CellSink;
 import org.apache.hadoop.hbase.regionserver.HMobStore;
 import org.apache.hadoop.hbase.regionserver.HStore;
@@ -74,9 +74,7 @@ public ScanType getScanType(CompactionRequest request) {
     @Override
     public InternalScanner createScanner(List<StoreFileScanner> scanners,
         ScanType scanType, FileDetails fd, long smallestReadPoint) throws IOException {
-      Scan scan = new Scan();
-      scan.setMaxVersions(store.getColumnFamilyDescriptor().getMaxVersions());
-      return new StoreScanner(store, store.getScanInfo(), scan, scanners, scanType,
+      return new StoreScanner(store, store.getScanInfo(), OptionalInt.empty(), scanners, scanType,
           smallestReadPoint, fd.earliestPutTs);
     }
   };
@@ -55,7 +55,6 @@
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.MobCompactPartitionPolicy;
-import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.crypto.Encryption;
@@ -805,15 +804,12 @@ private Path compactDelFilesInBatch(PartitionedMobCompactionRequest request,
    * @throws IOException if IO failure is encountered
    */
   private StoreScanner createScanner(List<StoreFile> filesToCompact, ScanType scanType)
       throws IOException {
     List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(filesToCompact,
       false, true, false, false, HConstants.LATEST_TIMESTAMP);
-    Scan scan = new Scan();
-    scan.setMaxVersions(column.getMaxVersions());
     long ttl = HStore.determineTTLFromFamily(column);
     ScanInfo scanInfo = new ScanInfo(conf, column, ttl, 0, CellComparator.COMPARATOR);
-    return new StoreScanner(scan, scanInfo, scanType, null, scanners, 0L,
-        HConstants.LATEST_TIMESTAMP);
+    return new StoreScanner(scanInfo, scanType, scanners);
   }
 
   /**
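Pulled out of the hunk above for readability, the del-file scanner construction before and after, with local variables exactly as in the surrounding createScanner method (this restates the diff rather than introducing new API):

// Before: a dummy Scan carried the column family's max versions, plus an explicit
// read point (0L) and max timestamp.
Scan scan = new Scan();
scan.setMaxVersions(column.getMaxVersions());
StoreScanner before = new StoreScanner(scan, scanInfo, scanType, null, scanners, 0L,
    HConstants.LATEST_TIMESTAMP);

// After: the trimmed constructor; the ScanInfo built from conf, column and ttl a few
// lines earlier already carries the version and TTL settings this path needs.
StoreScanner after = new StoreScanner(scanInfo, scanType, scanners);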
@@ -36,7 +36,6 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
@@ -19,16 +19,16 @@
 
 package org.apache.hadoop.hbase.regionserver;
 
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Scan;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.OptionalInt;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 /**
  * The MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator
@@ -106,23 +106,15 @@ public void remove() {
 
   /**
    * Creates the scanner for compacting the pipeline.
-   *
    * @return the scanner
    */
   private StoreScanner createScanner(Store store, List<KeyValueScanner> scanners)
       throws IOException {
-
-    // Get all available versions
-    Scan scan = new Scan();
-    scan.setMaxVersions(); //Get all available versions
-    StoreScanner internalScanner =
-        new StoreScanner(store, store.getScanInfo(), scan, scanners,
-            ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
-            HConstants.OLDEST_TIMESTAMP);
-
-    return internalScanner;
+    return new StoreScanner(store, store.getScanInfo(), OptionalInt.of(Integer.MAX_VALUE), scanners,
+        ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
   }
 
 
   /* Refill kev-value set (should be invoked only when KVS is empty)
    * Returns true if KVS is non-empty */
   private boolean refillKVS() {
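This is the one call site in the commit that needs every cell version rather than the ScanInfo default, so the replacement passes OptionalInt.of(Integer.MAX_VALUE) where the old code called the no-argument scan.setMaxVersions(). A short sketch of the distinction, with the variable name invented and the convention read off the call sites in this commit:

// OptionalInt.empty()               -> respect the ScanInfo / column family max versions
// OptionalInt.of(Integer.MAX_VALUE) -> keep all versions (pipeline / in-memory compaction)
OptionalInt allVersions = OptionalInt.of(Integer.MAX_VALUE);

StoreScanner pipelineScanner = new StoreScanner(store, store.getScanInfo(), allVersions,
    scanners, ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
    HConstants.OLDEST_TIMESTAMP);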
@@ -53,11 +53,9 @@ class ReversedStoreScanner extends StoreScanner implements KeyValueScanner {
   }
 
   /** Constructor for testing. */
-  ReversedStoreScanner(final Scan scan, ScanInfo scanInfo, ScanType scanType,
-      final NavigableSet<byte[]> columns, final List<? extends KeyValueScanner> scanners)
-      throws IOException {
-    super(scan, scanInfo, scanType, columns, scanners,
-        HConstants.LATEST_TIMESTAMP);
+  ReversedStoreScanner(Scan scan, ScanInfo scanInfo, NavigableSet<byte[]> columns,
+      List<? extends KeyValueScanner> scanners) throws IOException {
+    super(scan, scanInfo, columns, scanners);
   }
 
   @Override
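With the legacy matcher gone, the test-only ReversedStoreScanner constructor drops its ScanType and latest-timestamp arguments and delegates to a matching StoreScanner constructor. A hedged usage sketch (names and setup invented; scanInfo and scanners are assumed to exist, and the constructor is package-private, so this would only compile from a test in org.apache.hadoop.hbase.regionserver):

// Hypothetical test fragment exercising the slimmed-down constructor; before this
// change the call also supplied a ScanType and HConstants.LATEST_TIMESTAMP.
Scan scan = new Scan();
scan.setReversed(true);
NavigableSet<byte[]> columns = null; // no explicit column restriction
ReversedStoreScanner scanner = new ReversedStoreScanner(scan, scanInfo, columns, scanners);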
@@ -21,16 +21,15 @@
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
+import java.util.OptionalInt;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputControlUtil;
 import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
@@ -86,11 +85,8 @@ protected InternalScanner createScanner(List<KeyValueScanner> snapshotScanners,
           smallestReadPoint);
     }
     if (scanner == null) {
-      Scan scan = new Scan();
-      scan.setMaxVersions(store.getScanInfo().getMaxVersions());
-      scanner = new StoreScanner(store, store.getScanInfo(), scan,
-          snapshotScanners, ScanType.COMPACT_RETAIN_DELETES,
-          smallestReadPoint, HConstants.OLDEST_TIMESTAMP);
+      scanner = new StoreScanner(store, store.getScanInfo(), OptionalInt.empty(), snapshotScanners,
+          ScanType.COMPACT_RETAIN_DELETES, smallestReadPoint, HConstants.OLDEST_TIMESTAMP);
     }
     assert scanner != null;
     if (store.getCoprocessorHost() != null) {
