diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
index 5a4c907fadf3..cfccf30e3a0d 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
@@ -40,6 +40,7 @@
 import java.io.IOException;
 import java.net.URI;
+import java.util.Objects;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -187,10 +188,7 @@ public static void main(String[] args) throws Exception {
 
   @Override
   public int run(String[] args) throws IOException {
-    if (conf == null) {
-      LOG.error("Tool configuration is not initialized");
-      throw new NullPointerException("conf");
-    }
+    Objects.requireNonNull(conf, "Tool configuration is not initialized");
 
     CommandLine cmd;
     try {
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
index 41ce5d4d1df0..39cc440a4edc 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
@@ -35,6 +35,7 @@
 import java.io.IOException;
 import java.net.URI;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -232,10 +233,7 @@ public static void main(String[] args) throws Exception {
 
   @Override
   public int run(String[] args) {
-    if (conf == null) {
-      LOG.error("Tool configuration is not initialized");
-      throw new NullPointerException("conf");
-    }
+    Objects.requireNonNull(conf, "Tool configuration is not initialized");
 
     CommandLine cmd;
     try {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index f93c3cce9fa6..65a3393cf8e8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -31,6 +31,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
+import java.util.Objects;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.regex.Matcher;
@@ -230,13 +231,13 @@ private static List<Result> fullScan(Connection connection, QueryType type) thro
    * Callers should call close on the returned {@link Table} instance.
   * @param connection connection we're using to access Meta
   * @return An {@link Table} for hbase:meta
+   * @throws NullPointerException if {@code connection} is {@code null}
    */
  public static Table getMetaHTable(final Connection connection) throws IOException {
    // We used to pass whole CatalogTracker in here, now we just pass in Connection
-    if (connection == null) {
-      throw new NullPointerException("No connection");
-    } else if (connection.isClosed()) {
+    Objects.requireNonNull(connection, "Connection cannot be null");
+    if (connection.isClosed()) {
      throw new IOException("connection is closed");
    }
    return connection.getTable(TableName.META_TABLE_NAME);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index a73a2bc2e302..2dcfe0e7d6b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -38,6 +38,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
+import java.util.Objects;
 import java.util.function.Function;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
@@ -1594,14 +1595,13 @@ public static ScanMetrics toScanMetrics(final byte[] bytes) {
   }
 
   /**
-   * Unwraps an exception from a protobuf service into the underlying (expected) IOException.
-   * This method will always throw an exception.
+   * Unwraps an exception from a protobuf service into the underlying (expected) IOException. This
+   * method will always throw an exception.
    * @param se the {@code ServiceException} instance to convert into an {@code IOException}
+   * @throws NullPointerException if {@code se} is {@code null}
    */
   public static void toIOException(ServiceException se) throws IOException {
-    if (se == null) {
-      throw new NullPointerException("Null service exception passed!");
-    }
+    Objects.requireNonNull(se, "Service exception cannot be null");
 
     Throwable cause = se.getCause();
     if (cause != null && cause instanceof IOException) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
index 484cee3cb511..b3152c3c0eff 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
@@ -21,6 +21,7 @@
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.HConstants;
@@ -56,6 +57,7 @@ private AccessControlUtil() {}
    * @param qualifier optional qualifier
    * @param actions the permissions to be granted
    * @return A {@link AccessControlProtos} GrantRequest
+   * @throws NullPointerException if {@code tableName} is {@code null}
    */
   public static AccessControlProtos.GrantRequest buildGrantRequest(
       String username, TableName tableName, byte[] family, byte[] qualifier,
@@ -67,9 +69,9 @@ public static AccessControlProtos.GrantRequest buildGrantRequest(
     for (AccessControlProtos.Permission.Action a : actions) {
       permissionBuilder.addAction(a);
     }
-    if (tableName == null) {
-      throw new NullPointerException("TableName cannot be null");
-    }
+
+    Objects.requireNonNull(tableName, "TableName cannot be null");
+
     permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
     if (family != null) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 81082174bc0a..94a2805b61cc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -35,6 +35,7 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.NavigableSet;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.Callable;
@@ -1965,14 +1966,13 @@ public static MapReduceProtos.ScanMetrics toScanMetrics(ScanMetrics scanMetrics,
   }
 
   /**
-   * Unwraps an exception from a protobuf service into the underlying (expected) IOException.
-   * This method will always throw an exception.
+   * Unwraps an exception from a protobuf service into the underlying (expected) IOException. This
+   * method will always throw an exception.
    * @param se the {@code ServiceException} instance to convert into an {@code IOException}
+   * @throws NullPointerException if {@code se} is {@code null}
    */
   public static void toIOException(ServiceException se) throws IOException {
-    if (se == null) {
-      throw new NullPointerException("Null service exception passed!");
-    }
+    Objects.requireNonNull(se, "Service exception cannot be null");
 
     Throwable cause = se.getCause();
     if (cause != null && cause instanceof IOException) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java
index 83635cb4b11e..13025569276f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferWriterOutputStream.java
@@ -20,6 +20,7 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.util.Objects;
 
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -58,22 +59,21 @@ public ByteBufferWriterOutputStream(OutputStream os, int size) {
   }
 
   /**
-   * Writes len bytes from the specified ByteBuffer starting at offset off to
-   * this OutputStream. If b is null, a NullPointerException is thrown. If off
-   * is negative or larger than the ByteBuffer then an ArrayIndexOutOfBoundsException
-   * is thrown. If len is greater than the length of the ByteBuffer, then an
-   * ArrayIndexOutOfBoundsException is thrown. This method does not change the
-   * position of the ByteBuffer.
-   *
-   * @param b the ByteBuffer
-   * @param off the start offset in the data
-   * @param len the number of bytes to write
-   * @throws IOException
-   *           if an I/O error occurs. In particular, an IOException is thrown
-   *           if the output stream is closed.
+   * Writes len bytes from the specified ByteBuffer starting at offset off to this OutputStream. If
+   * off is negative or larger than the ByteBuffer then an ArrayIndexOutOfBoundsException is thrown.
+   * If len is greater than the length of the ByteBuffer, then an ArrayIndexOutOfBoundsException is
+   * thrown. This method does not change the position of the ByteBuffer.
+   * @param b the ByteBuffer
+   * @param off the start offset in the data
+   * @param len the number of bytes to write
+   * @throws IOException if an I/O error occurs. In particular, an IOException is thrown if the
+   *           output stream is closed.
+   * @throws NullPointerException if {@code b} is {@code null}
    */
   @Override
   public void write(ByteBuffer b, int off, int len) throws IOException {
+    Objects.requireNonNull(b);
+
     // Lazily load in the event that this version of 'write' is not invoked
     if (this.buf == null) {
       this.buf = new byte[this.bufSize];
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java
index 15351ed4e046..fd512ec4cfdb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java
@@ -83,10 +83,8 @@ public void reset() {
   }
 
   protected void init() {
+    Preconditions.checkState(iv != null, "IV is null");
     try {
-      if (iv == null) {
-        throw new NullPointerException("IV is null");
-      }
       cipher.init(javax.crypto.Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
     } catch (InvalidKeyException e) {
       throw new RuntimeException(e);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
index b0a1ca6af0ff..adb69ff45fa8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
@@ -21,6 +21,7 @@
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -115,10 +116,8 @@ public void setConf(Configuration conf) {
   @Override
   public int run(String[] args) throws IOException {
     cmdLineArgs = args;
-    if (conf == null) {
-      LOG.error("Tool configuration is not initialized");
-      throw new NullPointerException("conf");
-    }
+
+    Objects.requireNonNull(conf, "Tool configuration is not initialized");
 
     CommandLine cmd;
     List<String> argsList = new ArrayList<>(args.length);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index fa916ff3c5b2..6a0c1cd55c93 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -27,6 +27,7 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
@@ -1018,19 +1019,18 @@ private static class StreamCapabilities {
   }
 
   /**
-   * If our FileSystem version includes the StreamCapabilities class, check if
-   * the given stream has a particular capability.
+   * If our FileSystem version includes the StreamCapabilities class, check if the given stream has
+   * a particular capability.
    * @param stream capabilities are per-stream instance, so check this one specifically. must not be
-   *               null
+   *          null
    * @param capability what to look for, per Hadoop Common's FileSystem docs
    * @return true if there are no StreamCapabilities. false if there are, but this stream doesn't
    *         implement it. return result of asking the stream otherwise.
+   * @throws NullPointerException if {@code stream} is {@code null}
    */
   public static boolean hasCapability(FSDataOutputStream stream, String capability) {
     // be consistent whether or not StreamCapabilities is present
-    if (stream == null) {
-      throw new NullPointerException("stream parameter must not be null.");
-    }
+    Objects.requireNonNull(stream, "stream cannot be null");
     // If o.a.h.fs.StreamCapabilities doesn't exist, assume everyone does everything
     // otherwise old versions of Hadoop will break.
     boolean result = true;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ObjectPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ObjectPool.java
index 0a4cd90cce41..30f5a4636103 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ObjectPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ObjectPool.java
@@ -20,6 +20,7 @@
 import java.lang.ref.Reference;
 import java.lang.ref.ReferenceQueue;
+import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.locks.Lock;
@@ -75,7 +76,7 @@ public interface ObjectFactory<K, V> {
    *
    * @param objectFactory the factory to supply new objects on demand
    *
-   * @throws NullPointerException if {@code objectFactory} is null
+   * @throws NullPointerException if {@code objectFactory} is {@code null}
    */
   public ObjectPool(ObjectFactory<K, V> objectFactory) {
     this(objectFactory, DEFAULT_INITIAL_CAPACITY, DEFAULT_CONCURRENCY_LEVEL);
@@ -88,7 +89,7 @@ public ObjectPool(ObjectFactory<K, V> objectFactory) {
    * @param objectFactory the factory to supply new objects on demand
    * @param initialCapacity the initial capacity to keep objects in the pool
    *
-   * @throws NullPointerException if {@code objectFactory} is null
+   * @throws NullPointerException if {@code objectFactory} is {@code null}
    * @throws IllegalArgumentException if {@code initialCapacity} is negative
    */
   public ObjectPool(ObjectFactory<K, V> objectFactory, int initialCapacity) {
@@ -103,7 +104,7 @@ public ObjectPool(ObjectFactory<K, V> objectFactory, int initialCapacity) {
    * @param initialCapacity the initial capacity to keep objects in the pool
    * @param concurrencyLevel the estimated count of concurrently accessing threads
    *
-   * @throws NullPointerException if {@code objectFactory} is null
+   * @throws NullPointerException if {@code objectFactory} is {@code null}
    * @throws IllegalArgumentException if {@code initialCapacity} is negative or
    *           {@code concurrencyLevel} is non-positive
    */
@@ -112,10 +113,7 @@ public ObjectPool(
       ObjectFactory<K, V> objectFactory,
       int initialCapacity,
       int concurrencyLevel) {
-    if (objectFactory == null) {
-      throw new NullPointerException("Given object factory instance is NULL");
-    }
-    this.objectFactory = objectFactory;
+    this.objectFactory = Objects.requireNonNull(objectFactory, "Object factory cannot be null");
 
     this.referenceCache =
         new ConcurrentHashMap<K, Reference<V>>(initialCapacity, 0.75f, concurrencyLevel);
@@ -164,10 +162,10 @@ public void purge() {
   /**
    * Returns a shared object associated with the given {@code key},
    * which is identified by the {@code equals} method.
-   * @throws NullPointerException if {@code key} is null
+   * @throws NullPointerException if {@code key} is {@code null}
    */
   public V get(K key) {
-    Reference<V> ref = referenceCache.get(key);
+    Reference<V> ref = referenceCache.get(Objects.requireNonNull(key));
     if (ref != null) {
       V obj = ref.get();
       if (obj != null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 74afc69118a7..b127493fc5c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -40,12 +41,13 @@ final public class FilterWrapper extends Filter {
   Filter filter = null;
 
-  public FilterWrapper( Filter filter ) {
-    if (null == filter) {
-      // ensure the filter instance is not null
-      throw new NullPointerException("Cannot create FilterWrapper with null Filter");
-    }
-    this.filter = filter;
+  /**
+   * Constructor.
+   * @param filter filter to wrap
+   * @throws NullPointerException if {@code filter} is {@code null}
+   */
+  public FilterWrapper(Filter filter) {
+    this.filter = Objects.requireNonNull(filter, "Cannot create FilterWrapper with null Filter");
   }
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
index a4a805e8c985..228b54c7ab00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java
@@ -21,6 +21,7 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayDeque;
+import java.util.Objects;
 import java.util.Queue;
 
 import org.apache.hadoop.hbase.Cell;
@@ -163,8 +164,7 @@ private void enqueueReadyChunk(boolean closing) {
 
   @Override
   public void append(Cell cell) throws IOException {
-    if (cell == null)
-      throw new NullPointerException();
+    Objects.requireNonNull(cell);
 
     enqueueReadyChunk(false);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileInfo.java
index a75aea36fd03..89b3d34a7750 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileInfo.java
@@ -29,6 +29,7 @@
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
@@ -122,12 +123,13 @@ public HFileInfo(ReaderContext context, Configuration conf) throws IOException {
    *          with the reserved prefix
    * @return this file info object
    * @throws IOException if the key or value is invalid
+   * @throws NullPointerException if {@code key} or {@code value} is {@code null}
    */
   public HFileInfo append(final byte[] k, final byte[] v, final boolean checkPrefix)
       throws IOException {
-    if (k == null || v == null) {
-      throw new NullPointerException("Key nor value may be null");
-    }
+    Objects.requireNonNull(k, "key cannot be null");
+    Objects.requireNonNull(v, "value cannot be null");
+
     if (checkPrefix && isReservedFileInfoKey(k)) {
IOException("Keys with a " + HFileInfo.RESERVED_PREFIX + " are reserved"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 4ca0b24b991c..99043e83caaf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -43,6 +43,7 @@ import java.util.Map.Entry; import java.util.NavigableMap; import java.util.NavigableSet; +import java.util.Objects; import java.util.Optional; import java.util.RandomAccess; import java.util.Set; @@ -7322,13 +7323,14 @@ public static HRegion openHRegion(final Configuration conf, final FileSystem fs, * @param rsServices An interface we can request flushes against. * @param reporter An interface we can report progress against. * @return new HRegion + * @throws NullPointerException if {@code info} is {@code null} */ public static HRegion openHRegion(final Configuration conf, final FileSystem fs, final Path rootDir, final Path tableDir, final RegionInfo info, final TableDescriptor htd, final WAL wal, final RegionServerServices rsServices, final CancelableProgressable reporter) throws IOException { - if (info == null) throw new NullPointerException("Passed region info is null"); + Objects.requireNonNull(info, "RegionInfo cannot be null"); if (LOG.isDebugEnabled()) { LOG.debug("Opening region: " + info); } @@ -7403,12 +7405,11 @@ protected HRegion openHRegion(final CancelableProgressable reporter) * @param info Info for region to be opened. * @param htd the table descriptor * @return new HRegion + * @throws NullPointerException if {@code info} is {@code null} */ public static HRegion openReadOnlyFileSystemHRegion(final Configuration conf, final FileSystem fs, final Path tableDir, RegionInfo info, final TableDescriptor htd) throws IOException { - if (info == null) { - throw new NullPointerException("Passed region info is null"); - } + Objects.requireNonNull(info, "RegionInfo cannot be null"); if (LOG.isDebugEnabled()) { LOG.debug("Opening region (readOnly filesystem): " + info); } @@ -7426,7 +7427,7 @@ public static void warmupHRegion(final RegionInfo info, final CancelableProgressable reporter) throws IOException { - if (info == null) throw new NullPointerException("Passed region info is null"); + Objects.requireNonNull(info, "RegionInfo cannot be null"); if (LOG.isDebugEnabled()) { LOG.debug("HRegion.Warming up region: " + info); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java index a842595aa7d2..fa152c5061ae 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java @@ -46,7 +46,9 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; /** @@ -269,9 +271,7 @@ public boolean isHFile() { @Override public boolean isMajorCompactionResult() { - if (this.majorCompaction == null) { - throw new NullPointerException("This has not been set yet"); - } + Preconditions.checkState(this.majorCompaction != null, "Major compation has 
not been set yet"); return this.majorCompaction.get(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 44363fa70749..7719b53230a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -135,7 +135,7 @@ import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet; import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap; @@ -716,11 +716,9 @@ public void start(CoprocessorEnvironment env) throws IOException { } } - if (zkPermissionWatcher == null) { - throw new NullPointerException("ZKPermissionWatcher is null"); - } else if (accessChecker == null) { - throw new NullPointerException("AccessChecker is null"); - } + Preconditions.checkState(zkPermissionWatcher != null, "ZKPermissionWatcher is null"); + Preconditions.checkState(accessChecker != null, "AccessChecker is null"); + // set the user-provider. this.userProvider = UserProvider.instantiate(env.getConfiguration()); tableAcls = new MapMaker().weakValues().makeMap(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java index 3828606bfbb7..efff41e11c86 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java @@ -25,6 +25,7 @@ import java.util.Collection; import java.util.Comparator; import java.util.Iterator; +import java.util.Objects; import java.util.AbstractQueue; import org.apache.yetus.audience.InterfaceAudience; @@ -157,7 +158,7 @@ public BoundedPriorityBlockingQueue(int capacity, @Override public boolean offer(E e) { - if (e == null) throw new NullPointerException(); + Objects.requireNonNull(e); lock.lock(); try { @@ -174,7 +175,7 @@ public boolean offer(E e) { @Override public void put(E e) throws InterruptedException { - if (e == null) throw new NullPointerException(); + Objects.requireNonNull(e); lock.lock(); try { @@ -191,7 +192,7 @@ public void put(E e) throws InterruptedException { @Override public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException { - if (e == null) throw new NullPointerException(); + Objects.requireNonNull(e); long nanos = unit.toNanos(timeout); lock.lockInterruptibly(); @@ -321,8 +322,7 @@ public int drainTo(Collection c) { @Override public int drainTo(Collection c, int maxElements) { - if (c == null) - throw new NullPointerException(); + Objects.requireNonNull(c); if (c == this) throw new IllegalArgumentException(); if (maxElements <= 0)