HBASE-25308 [branch-1] Consume Guava from hbase-thirdparty hbase-shaded-miscellaneous
Andrew Purtell committed Nov 20, 2020
1 parent a0f55cd commit cfaf0fe
Showing 533 changed files with 1,163 additions and 1,463 deletions.
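
The change is the same mechanical relocation in every file: the direct com.google.guava:guava dependency is dropped and Guava imports are rewritten to the relocated packages bundled in hbase-thirdparty's hbase-shaded-miscellaneous artifact. A minimal sketch of the pattern follows; the class and field names are hypothetical (not from this commit), and it assumes hbase-shaded-miscellaneous is on the classpath. Only the package prefixes mirror the diff.

// Before this commit (direct, unshaded Guava):
//   import com.google.common.base.Preconditions;
//   import com.google.common.collect.Sets;

// After this commit (Guava relocated under org.apache.hbase.thirdparty):
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;

import java.util.Set;

// Hypothetical class, for illustration only; not part of the commit.
public class ShadedGuavaUsage {
  private final Set<String> servers;

  public ShadedGuavaUsage(Set<String> servers) {
    // Same Guava API and call sites; only the import package prefix changes.
    this.servers = Sets.newHashSet(Preconditions.checkNotNull(servers, "servers"));
  }
}

Because call sites are unaffected by the relocation, the hunks below are mostly confined to import blocks.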
8 changes: 4 additions & 4 deletions hbase-client/pom.xml
@@ -204,10 +204,6 @@
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
@@ -255,6 +251,10 @@
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-gson</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency>
</dependencies>

<profiles>
(next changed file)
@@ -19,9 +19,6 @@

package org.apache.hadoop.hbase;

import com.google.common.base.Objects;
import com.google.common.collect.Sets;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -45,6 +42,8 @@
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.io.VersionedWritable;
import org.apache.hbase.thirdparty.com.google.common.base.Objects;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;


/**
(next changed file)
@@ -43,11 +43,10 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PrettyPrinter;
import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

import com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

/**
* An HColumnDescriptor contains information about a column family such as the
(next changed file)
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hbase;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ServiceException;

import org.apache.commons.logging.Log;
@@ -48,6 +47,7 @@
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

import java.io.IOException;
import java.io.InterruptedIOException;
(next changed file)
@@ -20,7 +20,6 @@

package org.apache.hadoop.hbase;

import com.google.common.base.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -30,6 +29,7 @@
import org.apache.hadoop.hbase.replication.ReplicationLoadSource;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hbase.thirdparty.com.google.common.base.Objects;

import java.util.Arrays;
import java.util.List;
(next changed file)
@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase;

import com.google.common.net.InetAddresses;
import com.google.protobuf.InvalidProtocolBufferException;

import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -29,6 +28,7 @@
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.thirdparty.com.google.common.net.InetAddresses;

import java.io.Serializable;
import java.util.ArrayList;
(next changed file)
@@ -19,7 +19,6 @@

package org.apache.hadoop.hbase.client;

import com.google.common.annotations.VisibleForTesting;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
@@ -30,7 +29,6 @@
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -436,7 +434,6 @@ public void setOperationTimeout(int operationTimeout) {
* @return pool if non null, otherwise returns this.pool if non null, otherwise throws
* RuntimeException
*/
@VisibleForTesting
ExecutorService getPool(ExecutorService pool) {
if (pool != null) {
return pool;
@@ -803,13 +800,11 @@ private void addReplicaActionsAgain(
* Runnable (that can be submitted to thread pool) that submits MultiAction to a
* single server. The server call is synchronous, therefore we do it on a thread pool.
*/
@VisibleForTesting
class SingleServerRequestRunnable implements Runnable {
private final MultiAction<Row> multiAction;
private final int numAttempt;
private final ServerName server;
private final Set<PayloadCarryingServerCallable> callsInProgress;
@VisibleForTesting
SingleServerRequestRunnable(
MultiAction<Row> multiAction, int numAttempt, ServerName server,
Set<PayloadCarryingServerCallable> callsInProgress) {
@@ -988,7 +983,6 @@ public Set<PayloadCarryingServerCallable> getCallsInProgress() {
return callsInProgress;
}

@VisibleForTesting
SingleServerRequestRunnable createSingleServerRequest(MultiAction<Row> multiAction, int numAttempt, ServerName server,
Set<PayloadCarryingServerCallable> callsInProgress) {
return new SingleServerRequestRunnable(multiAction, numAttempt, server, callsInProgress);
@@ -1387,7 +1381,6 @@ private void logNoResubmit(ServerName oldServer, int numAttempt,
}
}

@VisibleForTesting
long getActionsInProgress() {
return actionsInProgress.get();
}
@@ -1798,15 +1791,13 @@ public Object[] getResults() throws InterruptedIOException {
/**
* Create a callable. Isolated to be easily overridden in the tests.
*/
@VisibleForTesting
protected MultiServerCallable<Row> createCallable(final ServerName server,
TableName tableName, final MultiAction<Row> multi) {
return new MultiServerCallable<Row>(connection, tableName, server,
AsyncProcess.this.rpcFactory, multi, rpcTimeout, tracker, multi.getPriority());
}
}

@VisibleForTesting
protected void updateStats(ServerName server, Map<byte[], MultiResponse.RegionResult> results) {
boolean metrics = AsyncProcess.this.connection.getConnectionMetrics() != null;
boolean stats = AsyncProcess.this.connection.getStatisticsTracker() != null;
@@ -1823,7 +1814,6 @@ protected void updateStats(ServerName server, Map<byte[], MultiResponse.RegionRe
}
}

@VisibleForTesting
<CResult> AsyncRequestFutureImpl<CResult> createAsyncRequestFuture(
TableName tableName, List<Action<Row>> actions, long nonceGroup, ExecutorService pool,
Batch.Callback<CResult> callback, Object[] results, boolean needResults,
@@ -1836,13 +1826,11 @@ tableName, actions, nonceGroup, getPool(pool), needResults,
/**
* Create a caller. Isolated to be easily overridden in the tests.
*/
@VisibleForTesting
protected RpcRetryingCaller<MultiResponse> createCaller(PayloadCarryingServerCallable callable,
int rpcTimeout) {
return rpcCallerFactory.<MultiResponse> newCaller(rpcTimeout);
}

@VisibleForTesting
/** Waits until all outstanding tasks are done. Used in tests. */
void waitUntilDone() throws InterruptedIOException {
waitForMaximumCurrentTasks(0, null);
@@ -1855,7 +1843,6 @@ private void waitForMaximumCurrentTasks(int max, String tableName)
}

// Break out this method so testable
@VisibleForTesting
void waitForMaximumCurrentTasks(int max, final AtomicLong tasksInProgress, final long id,
String tableName) throws InterruptedIOException {
long lastLog = EnvironmentEdgeManager.currentTime();
@@ -2011,7 +1998,6 @@ private enum Retry {
/**
* Collect all advices from checkers and make the final decision.
*/
@VisibleForTesting
static class RowCheckerHost {
private final List<RowChecker> checkers;
private boolean isEnd = false;
@@ -2064,7 +2050,6 @@ ReturnCode canTakeOperation(HRegionLocation loc, long rowSize) {
/**
* Provide a way to control the flow of rows iteration.
*/
@VisibleForTesting
interface RowChecker {
enum ReturnCode {
/**
@@ -2098,7 +2083,6 @@ enum ReturnCode {
* Reduce the limit of heapsize for submitting quickly
* if there is no running task.
*/
@VisibleForTesting
static class SubmittedSizeChecker implements RowChecker {
private final long maxHeapSizeSubmit;
private long heapSize = 0;
@@ -2128,7 +2112,6 @@ public void reset() {
/**
* limit the max number of tasks in an AsyncProcess.
*/
@VisibleForTesting
static class TaskCountChecker implements RowChecker {
private static final long MAX_WAITING_TIME = 1000; //ms
private final Set<HRegionInfo> regionsIncluded = new HashSet<>();
@@ -2233,7 +2216,6 @@ public void notifyFinal(ReturnCode code, HRegionLocation loc, long rowSize) {
/**
* limit the request size for each regionserver.
*/
@VisibleForTesting
static class RequestSizeChecker implements RowChecker {
private final long maxHeapSizePerRequest;
private final Map<ServerName, Long> serverRequestSizes = new HashMap<>();
(next changed file)
@@ -16,7 +16,6 @@
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.client.BufferedMutatorParams.UNSET;
import com.google.common.annotations.VisibleForTesting;
import java.io.Closeable;
import java.io.IOException;
import java.io.InterruptedIOException;
@@ -38,6 +37,7 @@
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
* <p>
(next changed file)
@@ -19,8 +19,6 @@

import static org.apache.hadoop.hbase.client.ConnectionUtils.createScanResultCache;

import com.google.common.annotations.VisibleForTesting;

import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.LinkedList;
@@ -45,6 +43,7 @@
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
* Implements the scanner interface for the HBase client. If there are multiple regions in a table,
(next changed file)
@@ -11,12 +11,12 @@

package org.apache.hadoop.hbase.client;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
* Configuration parameters for the connection.
(next changed file)
@@ -202,13 +202,12 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.zookeeper.KeeperException;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
(next changed file)
@@ -20,8 +20,6 @@
import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;

import com.google.common.annotations.VisibleForTesting;

import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
@@ -44,6 +42,7 @@
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
* Utility used by client connections.
(next changed file)
@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase.client;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;

@@ -186,6 +185,7 @@
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.zookeeper.KeeperException;

/**
(next changed file)
@@ -31,7 +31,7 @@
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Pair;

import com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
* An implementation of {@link RegionLocator}. Used to view region location information for a single
(next changed file)
@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase.client;

import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.Service;
@@ -77,6 +76,7 @@
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

/**
* An implementation of {@link Table}. Used to communicate with a single HBase table.
(next changed file)
@@ -19,9 +19,6 @@
*/
package org.apache.hadoop.hbase.client;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

import java.io.IOException;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
@@ -52,6 +49,8 @@
import org.apache.hadoop.hbase.client.AsyncProcess.AsyncRequestFuture;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
* HTableMultiplexer provides a thread-safe non blocking PUT API across all the tables.