HBASE-23308: Review of NullPointerExceptions (#836)
Signed-off-by: stack <stack@apache.org>
belugabehr authored and saintstack committed Nov 19, 2019
1 parent ca6e67a commit 33bedf8
Showing 18 changed files with 86 additions and 89 deletions.
@@ -40,6 +40,7 @@

import java.io.IOException;
import java.net.URI;
+import java.util.Objects;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -187,10 +188,7 @@ public static void main(String[] args) throws Exception {

@Override
public int run(String[] args) throws IOException {
-if (conf == null) {
-  LOG.error("Tool configuration is not initialized");
-  throw new NullPointerException("conf");
-}
+Objects.requireNonNull(conf, "Tool configuration is not initialized");

CommandLine cmd;
try {
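For reference, java.util.Objects.requireNonNull(obj, message) throws a NullPointerException carrying the supplied message when obj is null, and returns obj unchanged otherwise, which is what lets it replace the four-line check above. Note that the replaced block also logged the error first; after this change the message travels only in the exception. A minimal standalone sketch (hypothetical demo class, not part of this commit):

    import java.util.Objects;

    public class RequireNonNullDemo {
      public static void main(String[] args) {
        Object conf = null;
        try {
          // Throws NullPointerException with the supplied message when conf is null.
          Objects.requireNonNull(conf, "Tool configuration is not initialized");
        } catch (NullPointerException e) {
          System.out.println(e.getMessage()); // Tool configuration is not initialized
        }
      }
    }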
@@ -35,6 +35,7 @@
import java.io.IOException;
import java.net.URI;
import java.util.List;
+import java.util.Objects;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
@@ -232,10 +233,7 @@ public static void main(String[] args) throws Exception {

@Override
public int run(String[] args) {
-if (conf == null) {
-  LOG.error("Tool configuration is not initialized");
-  throw new NullPointerException("conf");
-}
+Objects.requireNonNull(conf, "Tool configuration is not initialized");

CommandLine cmd;
try {
@@ -31,6 +31,7 @@
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
+import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.regex.Matcher;
@@ -230,13 +231,13 @@ private static List<Result> fullScan(Connection connection, QueryType type) thro
* Callers should call close on the returned {@link Table} instance.
* @param connection connection we're using to access Meta
* @return An {@link Table} for <code>hbase:meta</code>
+* @throws NullPointerException if {@code connection} is {@code null}
*/
public static Table getMetaHTable(final Connection connection)
throws IOException {
// We used to pass whole CatalogTracker in here, now we just pass in Connection
-if (connection == null) {
-  throw new NullPointerException("No connection");
-} else if (connection.isClosed()) {
+Objects.requireNonNull(connection, "Connection cannot be null");
+if (connection.isClosed()) {
throw new IOException("connection is closed");
}
return connection.getTable(TableName.META_TABLE_NAME);
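In this method the order matters: requireNonNull must run before connection.isClosed(), since calling isClosed() on a null reference would itself raise a bare, message-less NullPointerException. A compressed sketch of the same ordering (hypothetical Conn interface, not the HBase Connection):

    import java.io.IOException;
    import java.util.Objects;

    public class OrderingDemo {
      interface Conn {
        boolean isClosed();
      }

      static void check(Conn connection) throws IOException {
        // Argument check first: a clear message instead of a bare NPE from isClosed().
        Objects.requireNonNull(connection, "Connection cannot be null");
        if (connection.isClosed()) {
          throw new IOException("connection is closed");
        }
      }

      public static void main(String[] args) throws IOException {
        try {
          check(null);
        } catch (NullPointerException e) {
          System.out.println(e.getMessage()); // Connection cannot be null
        }
        check(() -> false); // a non-null, open connection passes both checks
      }
    }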
@@ -38,6 +38,7 @@
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
+import java.util.Objects;
import java.util.function.Function;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
@@ -1594,14 +1595,13 @@ public static ScanMetrics toScanMetrics(final byte[] bytes) {
}

/**
-* Unwraps an exception from a protobuf service into the underlying (expected) IOException.
-* This method will <strong>always</strong> throw an exception.
+* Unwraps an exception from a protobuf service into the underlying (expected) IOException. This
+* method will <strong>always</strong> throw an exception.
* @param se the {@code ServiceException} instance to convert into an {@code IOException}
+* @throws NullPointerException if {@code se} is {@code null}
*/
public static void toIOException(ServiceException se) throws IOException {
-if (se == null) {
-  throw new NullPointerException("Null service exception passed!");
-}
+Objects.requireNonNull(se, "Service exception cannot be null");

Throwable cause = se.getCause();
if (cause != null && cause instanceof IOException) {
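The unwrap logic above always throws: it prefers a wrapped IOException cause when present and otherwise wraps the ServiceException itself. A self-contained sketch of the same pattern, using a stand-in ServiceException rather than the protobuf one:

    import java.io.IOException;
    import java.util.Objects;

    public class UnwrapDemo {
      // Stand-in for the protobuf ServiceException, for illustration only.
      static class ServiceException extends Exception {
        ServiceException(Throwable cause) { super(cause); }
      }

      static void toIOException(ServiceException se) throws IOException {
        Objects.requireNonNull(se, "Service exception cannot be null");
        Throwable cause = se.getCause();
        if (cause instanceof IOException) { // instanceof is false for null anyway
          throw (IOException) cause;
        }
        throw new IOException(se);
      }

      public static void main(String[] args) {
        try {
          toIOException(new ServiceException(new IOException("disk full")));
        } catch (IOException e) {
          System.out.println(e.getMessage()); // disk full
        }
      }
    }

(The cause != null test kept in the method body above is redundant for the same reason, but the commit leaves that line untouched.)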
@@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
+import java.util.Objects;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.HConstants;
@@ -56,6 +57,7 @@ private AccessControlUtil() {}
* @param qualifier optional qualifier
* @param actions the permissions to be granted
* @return A {@link AccessControlProtos} GrantRequest
+* @throws NullPointerException if {@code tableName} is {@code null}
*/
public static AccessControlProtos.GrantRequest buildGrantRequest(
String username, TableName tableName, byte[] family, byte[] qualifier,
@@ -67,9 +69,9 @@ public static AccessControlProtos.GrantRequest buildGrantRequest(
for (AccessControlProtos.Permission.Action a : actions) {
permissionBuilder.addAction(a);
}
-if (tableName == null) {
-  throw new NullPointerException("TableName cannot be null");
-}
+
+Objects.requireNonNull(tableName, "TableName cannot be null");
+
permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));

if (family != null) {
@@ -35,6 +35,7 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
+import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
@@ -1965,14 +1966,13 @@ public static MapReduceProtos.ScanMetrics toScanMetrics(ScanMetrics scanMetrics,
}

/**
-* Unwraps an exception from a protobuf service into the underlying (expected) IOException.
-* This method will <strong>always</strong> throw an exception.
+* Unwraps an exception from a protobuf service into the underlying (expected) IOException. This
+* method will <strong>always</strong> throw an exception.
* @param se the {@code ServiceException} instance to convert into an {@code IOException}
+* @throws NullPointerException if {@code se} is {@code null}
*/
public static void toIOException(ServiceException se) throws IOException {
-if (se == null) {
-  throw new NullPointerException("Null service exception passed!");
-}
+Objects.requireNonNull(se, "Service exception cannot be null");

Throwable cause = se.getCause();
if (cause != null && cause instanceof IOException) {
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.util.Objects;

import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -58,22 +59,21 @@ public ByteBufferWriterOutputStream(OutputStream os, int size) {
}

/**
-* Writes len bytes from the specified ByteBuffer starting at offset off to
-* this OutputStream. If b is null, a NullPointerException is thrown. If off
-* is negative or larger than the ByteBuffer then an ArrayIndexOutOfBoundsException
-* is thrown. If len is greater than the length of the ByteBuffer, then an
-* ArrayIndexOutOfBoundsException is thrown. This method does not change the
-* position of the ByteBuffer.
-*
-* @param b the ByteBuffer
-* @param off the start offset in the data
-* @param len the number of bytes to write
-* @throws IOException
-*           if an I/O error occurs. In particular, an IOException is thrown
-*           if the output stream is closed.
+* Writes len bytes from the specified ByteBuffer starting at offset off to this OutputStream. If
+* off is negative or larger than the ByteBuffer then an ArrayIndexOutOfBoundsException is thrown.
+* If len is greater than the length of the ByteBuffer, then an ArrayIndexOutOfBoundsException is
+* thrown. This method does not change the position of the ByteBuffer.
+* @param b the ByteBuffer
+* @param off the start offset in the data
+* @param len the number of bytes to write
+* @throws IOException if an I/O error occurs. In particular, an IOException is thrown if the
+*           output stream is closed.
+* @throws NullPointerException if {@code b} is {@code null}
*/
@Override
public void write(ByteBuffer b, int off, int len) throws IOException {
+Objects.requireNonNull(b);
+
// Lazily load in the event that this version of 'write' is not invoked
if (this.buf == null) {
this.buf = new byte[this.bufSize];
@@ -83,10 +83,8 @@ public void reset() {
}

protected void init() {
+Preconditions.checkState(iv != null, "IV is null");
try {
-if (iv == null) {
-  throw new NullPointerException("IV is null");
-}
cipher.init(javax.crypto.Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
} catch (InvalidKeyException e) {
throw new RuntimeException(e);
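This is the one hunk that reaches for Guava's Preconditions.checkState rather than requireNonNull: a null IV here means the decryptor was used before being initialized, so the failure is a state error (IllegalStateException) rather than a bad argument (NullPointerException). A minimal sketch of the distinction (hypothetical demo class, assuming Guava is on the classpath, as it is in HBase):

    import com.google.common.base.Preconditions;
    import java.util.Objects;

    public class CheckStyleDemo {
      private byte[] iv; // never set, so init() must fail

      void init() {
        // State check: throws IllegalStateException("IV is null").
        Preconditions.checkState(iv != null, "IV is null");
      }

      public static void main(String[] args) {
        try {
          new CheckStyleDemo().init();
        } catch (IllegalStateException e) {
          System.out.println("IllegalStateException: " + e.getMessage());
        }
        try {
          // Argument check: throws NullPointerException instead.
          Objects.requireNonNull(null, "IV is null");
        } catch (NullPointerException e) {
          System.out.println("NullPointerException: " + e.getMessage());
        }
      }
    }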
@@ -21,6 +21,7 @@
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
+import java.util.Objects;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -115,10 +116,8 @@ public void setConf(Configuration conf) {
@Override
public int run(String[] args) throws IOException {
cmdLineArgs = args;
-if (conf == null) {
-  LOG.error("Tool configuration is not initialized");
-  throw new NullPointerException("conf");
-}
+
+Objects.requireNonNull(conf, "Tool configuration is not initialized");

CommandLine cmd;
List<String> argsList = new ArrayList<>(args.length);
@@ -27,6 +27,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
@@ -1018,19 +1019,18 @@ private static class StreamCapabilities {
}

/**
-* If our FileSystem version includes the StreamCapabilities class, check if
-* the given stream has a particular capability.
+* If our FileSystem version includes the StreamCapabilities class, check if the given stream has
+* a particular capability.
* @param stream capabilities are per-stream instance, so check this one specifically. must not be
-*           null
+*          null
* @param capability what to look for, per Hadoop Common's FileSystem docs
* @return true if there are no StreamCapabilities. false if there are, but this stream doesn't
* implement it. return result of asking the stream otherwise.
+* @throws NullPointerException if {@code stream} is {@code null}
*/
public static boolean hasCapability(FSDataOutputStream stream, String capability) {
// be consistent whether or not StreamCapabilities is present
-if (stream == null) {
-  throw new NullPointerException("stream parameter must not be null.");
-}
+Objects.requireNonNull(stream, "stream cannot be null");
// If o.a.h.fs.StreamCapabilities doesn't exist, assume everyone does everything
// otherwise old versions of Hadoop will break.
boolean result = true;
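requireNonNull also has a Supplier<String> overload (since Java 8) that builds the message only if the check fails, which matters when the message is concatenated on a hot path; the plain String form used throughout this commit is fine because all of its messages are constants. A small sketch (hypothetical values):

    import java.util.Objects;

    public class LazyMessageDemo {
      public static void main(String[] args) {
        Object stream = new Object();
        String capability = "hflush";
        // The lambda runs only if stream is null, so no string is built here.
        Objects.requireNonNull(stream,
            () -> "stream cannot be null when checking capability " + capability);
        System.out.println("stream is non-null");
      }
    }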
@@ -20,6 +20,7 @@

import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
+import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.Lock;
@@ -75,7 +76,7 @@ public interface ObjectFactory<K, V> {
*
* @param objectFactory the factory to supply new objects on demand
*
-* @throws NullPointerException if {@code objectFactory} is null
+* @throws NullPointerException if {@code objectFactory} is {@code null}
*/
public ObjectPool(ObjectFactory<K, V> objectFactory) {
this(objectFactory, DEFAULT_INITIAL_CAPACITY, DEFAULT_CONCURRENCY_LEVEL);
@@ -88,7 +89,7 @@ public ObjectPool(ObjectFactory<K, V> objectFactory) {
* @param objectFactory the factory to supply new objects on demand
* @param initialCapacity the initial capacity to keep objects in the pool
*
-* @throws NullPointerException if {@code objectFactory} is null
+* @throws NullPointerException if {@code objectFactory} is {@code null}
* @throws IllegalArgumentException if {@code initialCapacity} is negative
*/
public ObjectPool(ObjectFactory<K, V> objectFactory, int initialCapacity) {
@@ -103,7 +104,7 @@ public ObjectPool(
* @param initialCapacity the initial capacity to keep objects in the pool
* @param concurrencyLevel the estimated count of concurrently accessing threads
*
-* @throws NullPointerException if {@code objectFactory} is null
+* @throws NullPointerException if {@code objectFactory} is {@code null}
* @throws IllegalArgumentException if {@code initialCapacity} is negative or
* {@code concurrencyLevel} is non-positive
*/
@@ -112,10 +113,7 @@ public ObjectPool(
int initialCapacity,
int concurrencyLevel) {

-if (objectFactory == null) {
-  throw new NullPointerException("Given object factory instance is NULL");
-}
-this.objectFactory = objectFactory;
+this.objectFactory = Objects.requireNonNull(objectFactory, "Object factory cannot be null");

this.referenceCache =
new ConcurrentHashMap<K, Reference<V>>(initialCapacity, 0.75f, concurrencyLevel);
@@ -164,10 +162,10 @@ public void purge() {
/**
* Returns a shared object associated with the given {@code key},
* which is identified by the {@code equals} method.
-* @throws NullPointerException if {@code key} is null
+* @throws NullPointerException if {@code key} is {@code null}
*/
public V get(K key) {
-Reference<V> ref = referenceCache.get(key);
+Reference<V> ref = referenceCache.get(Objects.requireNonNull(key));
if (ref != null) {
V obj = ref.get();
if (obj != null) {
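Because requireNonNull returns its argument, the null check and the field assignment collapse into a single statement in the constructor above, and the check can likewise be applied inline at a call site, as in get. A condensed sketch of both idioms (hypothetical Pool class, not the ObjectPool from this commit):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Objects;

    public class Pool<K, V> {
      public interface Factory<K, V> {
        V create(K key);
      }

      private final Map<K, V> cache = new HashMap<>();
      private final Factory<K, V> factory;

      public Pool(Factory<K, V> factory) {
        // Check and assign in one statement.
        this.factory = Objects.requireNonNull(factory, "Object factory cannot be null");
      }

      public V get(K key) {
        // Inline check at the point of use.
        return cache.computeIfAbsent(Objects.requireNonNull(key), factory::create);
      }
    }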
@@ -21,6 +21,7 @@

import java.io.IOException;
import java.util.List;
+import java.util.Objects;

import org.apache.hadoop.hbase.Cell;
import org.apache.yetus.audience.InterfaceAudience;
@@ -40,12 +41,13 @@
final public class FilterWrapper extends Filter {
Filter filter = null;

-public FilterWrapper( Filter filter ) {
-  if (null == filter) {
-    // ensure the filter instance is not null
-    throw new NullPointerException("Cannot create FilterWrapper with null Filter");
-  }
-  this.filter = filter;
+/**
+ * Constructor.
+ * @param filter filter to wrap
+ * @throws NullPointerException if {@code filter} is {@code null}
+ */
+public FilterWrapper(Filter filter) {
+  this.filter = Objects.requireNonNull(filter, "Cannot create FilterWrapper with null Filter");
}

/**
@@ -21,6 +21,7 @@
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayDeque;
+import java.util.Objects;
import java.util.Queue;

import org.apache.hadoop.hbase.Cell;
@@ -163,8 +164,7 @@ private void enqueueReadyChunk(boolean closing) {

@Override
public void append(Cell cell) throws IOException {
-if (cell == null)
-  throw new NullPointerException();
+Objects.requireNonNull(cell);

enqueueReadyChunk(false);

@@ -29,6 +29,7 @@
import java.util.Comparator;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
@@ -122,12 +123,13 @@ public HFileInfo(ReaderContext context, Configuration conf) throws IOException {
* with the reserved prefix
* @return this file info object
* @throws IOException if the key or value is invalid
+* @throws NullPointerException if {@code key} or {@code value} is {@code null}
*/
public HFileInfo append(final byte[] k, final byte[] v,
final boolean checkPrefix) throws IOException {
-if (k == null || v == null) {
-  throw new NullPointerException("Key nor value may be null");
-}
+Objects.requireNonNull(k, "key cannot be null");
+Objects.requireNonNull(v, "value cannot be null");
+
if (checkPrefix && isReservedFileInfoKey(k)) {
throw new IOException("Keys with a " + HFileInfo.RESERVED_PREFIX
+ " are reserved");
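Splitting the old combined test (k == null || v == null) into two requireNonNull calls also replaces its awkward message ("Key nor value may be null") and pinpoints which argument was actually null. A small sketch (hypothetical harness):

    import java.util.Objects;

    public class SeparateChecksDemo {
      static void append(byte[] k, byte[] v) {
        Objects.requireNonNull(k, "key cannot be null");
        Objects.requireNonNull(v, "value cannot be null");
      }

      public static void main(String[] args) {
        try {
          append(new byte[] { 1 }, null);
        } catch (NullPointerException e) {
          System.out.println(e.getMessage()); // value cannot be null
        }
      }
    }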
[The remaining four changed files were not loaded in this page capture.]