Skip to content

Commit

Permalink
TEZ-4276: Clean Up Use of Preconditions (#96) (David Mollitor reviewe…
Browse files Browse the repository at this point in the history
…d by Laszlo Bodor)
  • Loading branch information
belugabehr committed Jan 12, 2022
1 parent a6a936d commit 41cbc17
Show file tree
Hide file tree
Showing 35 changed files with 149 additions and 156 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.tez.common.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;

@SuppressWarnings("unchecked")
Expand All @@ -28,8 +28,7 @@ public class NamedEntityDescriptor<T extends NamedEntityDescriptor<T>> extends E
@InterfaceAudience.Private
public NamedEntityDescriptor(String entityName, String className) {
super(className);
Preconditions.checkArgument(entityName != null, "EntityName must be specified");
this.entityName = entityName;
this.entityName = Objects.requireNonNull(entityName, "EntityName must be specified");
}

public String getEntityName() {
Expand Down
5 changes: 3 additions & 2 deletions tez-api/src/main/java/org/apache/tez/dag/api/Vertex.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
Expand Down Expand Up @@ -276,10 +277,10 @@ public Map<String, LocalResource> getTaskLocalFiles() {
* set environment for all vertices via Tezconfiguration#TEZ_TASK_LAUNCH_ENV
* @param environment
* @return this Vertex
 * @throws NullPointerException if {@code environment} is {@code null}
*/
public Vertex setTaskEnvironment(Map<String, String> environment) {
Preconditions.checkArgument(environment != null);
this.taskEnvironment.putAll(environment);
this.taskEnvironment.putAll(Objects.requireNonNull(environment));
return this;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import javax.annotation.Nullable;
Expand All @@ -29,7 +30,6 @@
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.tez.dag.api.event.VertexState;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.InputSpecUpdate;
import org.apache.tez.runtime.api.VertexStatistics;
import org.apache.tez.runtime.api.events.CustomProcessorEvent;
Expand Down Expand Up @@ -72,8 +72,7 @@ public class TaskWithLocationHint {
Integer taskIndex;
TaskLocationHint locationHint;
public TaskWithLocationHint(Integer taskIndex, @Nullable TaskLocationHint locationHint) {
Preconditions.checkState(taskIndex != null);
this.taskIndex = taskIndex;
this.taskIndex = Objects.requireNonNull(taskIndex);
this.locationHint = locationHint;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.tez.runtime.api.events;

import java.nio.ByteBuffer;
import java.util.Objects;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
Expand All @@ -27,8 +28,6 @@
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.TaskAttemptIdentifier;

import org.apache.tez.common.Preconditions;

/**
* Event used to send information from a Task to the VertexManager for a vertex.
* This may be used to send statistics like samples etc to the VertexManager for
Expand All @@ -50,11 +49,17 @@ public class VertexManagerEvent extends Event {
*/
private final ByteBuffer userPayload;

/**
 * Constructor.
 *
 * @param vertexName name of the target vertex to receive this event
 * @param userPayload payload to deliver to the vertex manager
 * @throws NullPointerException if {@code vertexName} or {@code userPayload}
 * is {@code null}
 */
private VertexManagerEvent(String vertexName, ByteBuffer userPayload) {
Preconditions.checkArgument(vertexName != null);
Preconditions.checkArgument(userPayload != null);
this.targetVertexName = vertexName;
this.userPayload = userPayload;
this.targetVertexName = Objects.requireNonNull(vertexName);
this.userPayload = Objects.requireNonNull(userPayload);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.tez.common;

import java.io.File;
import java.util.Objects;

import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.log4j.FileAppender;
Expand Down Expand Up @@ -48,14 +49,14 @@ public void activateOptions() {
* The file will be created within the container's log directory.
*
* @param fileName
* @throws NullPointerException if {@code fileName} is {@code null}
* @throws IllegalArgumentException if {@code fileName} is an absolute path
*/
public void setLogFileName(String fileName) {
if (fileName == null || fileName.contains(File.pathSeparator)) {
throw new RuntimeException(
"Invalid filename specified: "
+ fileName
+ " . FileName should not have a path component and should not be empty.");
}
Objects.requireNonNull(fileName);
Preconditions.checkArgument(!fileName.contains(File.pathSeparator),
"Invalid filename specified: " + fileName
+ " . FileName should not have a path component and should not be empty.");
this.logFileName = fileName;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.yarn.api.records.ApplicationId;

import org.apache.tez.common.Preconditions;
import org.apache.tez.util.FastNumberFormat;

import com.google.common.collect.Interner;
Expand All @@ -49,12 +49,13 @@ public class TezDAGID extends TezID {
* Get a DAGID object from given {@link ApplicationId}.
* @param applicationId Application that this dag belongs to
* @param id the dag number
 * @throws NullPointerException if {@code applicationId} is {@code null}
*/
public static TezDAGID getInstance(ApplicationId applicationId, int id) {
// The newly created TezDAGIds are primarily for their hashCode method, and
// will be short-lived.
// Alternatively the cache can be keyed by the hash of the incoming parameters.
Preconditions.checkArgument(applicationId != null, "ApplicationID cannot be null");
Objects.requireNonNull(applicationId, "ApplicationID cannot be null");
return tezDAGIDCache.intern(new TezDAGID(applicationId, id));
}

Expand All @@ -63,15 +64,16 @@ public static TezDAGID getInstance(ApplicationId applicationId, int id) {
* @param yarnRMIdentifier YARN RM identifier
* @param appId application number
* @param id the dag number
* @throws NullPointerException if {@code yarnRMIdentifier} is {@code null}
*/
public static TezDAGID getInstance(String yarnRMIdentifier, int appId, int id) {
// The newly created TezDAGIds are primarily for their hashCode method, and
// will be short-lived.
// Alternatively the cache can be keyed by the hash of the incoming parameters.
Preconditions.checkArgument(yarnRMIdentifier != null, "yarnRMIdentifier cannot be null");
Objects.requireNonNull(yarnRMIdentifier, "yarnRMIdentifier cannot be null");
return tezDAGIDCache.intern(new TezDAGID(yarnRMIdentifier, appId, id));
}

// Public for Writable serialization. Verify if this is actually required.
public TezDAGID() {
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
Expand Down Expand Up @@ -59,16 +60,15 @@ public TezTaskAttemptID() {
* Constructs a TaskAttemptID object from given {@link TezTaskID}.
* @param taskID TaskID that this task belongs to
* @param id the task attempt number
* @throws NullPointerException if {@code taskID} is {@code null}
*/
public static TezTaskAttemptID getInstance(TezTaskID taskID, int id) {
Objects.requireNonNull(taskID);
return tezTaskAttemptIDCache.intern(new TezTaskAttemptID(taskID, id));
}

private TezTaskAttemptID(TezTaskID taskId, int id) {
super(id);
if(taskId == null) {
throw new IllegalArgumentException("taskId cannot be null");
}
this.taskId = taskId;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,11 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import org.apache.tez.common.Preconditions;
import org.apache.tez.util.FastNumberFormat;

import com.google.common.collect.Interner;
Expand Down Expand Up @@ -61,15 +61,15 @@ public FastNumberFormat initialValue() {
* Constructs a TezTaskID object from given {@link TezVertexID}.
* @param vertexID the vertexID object for this TezTaskID
* @param id the tip number
* @throws NullPointerException if {@code vertexID} is {@code null}
*/
public static TezTaskID getInstance(TezVertexID vertexID, int id) {
Preconditions.checkArgument(vertexID != null, "vertexID cannot be null");
Objects.requireNonNull(vertexID, "vertexID cannot be null");
return tezTaskIDCache.intern(new TezTaskID(vertexID, id));
}

private TezTaskID(TezVertexID vertexID, int id) {
super(id);
Preconditions.checkArgument(vertexID != null, "vertexID cannot be null");
this.vertexId = vertexID;
this.serializingHash = getHashCode(true);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,11 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import org.apache.tez.common.Preconditions;
import org.apache.tez.util.FastNumberFormat;

import com.google.common.collect.Interner;
Expand Down Expand Up @@ -67,9 +67,10 @@ public TezVertexID() {
* Constructs a TezVertexID object from given {@link TezDAGID}.
* @param dagId TezDAGID object for this TezVertexID
* @param id the tip number
* @throws NullPointerException if {@code dagId} is {@code null}
*/
public static TezVertexID getInstance(TezDAGID dagId, int id) {
Preconditions.checkArgument(dagId != null, "DagID cannot be null");
Objects.requireNonNull(dagId, "DagID cannot be null");
return tezVertexIDCache.intern(new TezVertexID(dagId, id));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,31 +191,31 @@ public String apply(Vertex input) {

@Override
public int getVertexTotalTaskCount(String vertexName) {
Preconditions.checkArgument(vertexName != null, "VertexName must be specified");
Objects.requireNonNull(vertexName, "VertexName must be specified");
DAG dag = getDag();
Vertex vertex = dag.getVertex(vertexName);
return vertex.getTotalTasks();
}

@Override
public int getVertexCompletedTaskCount(String vertexName) {
Preconditions.checkArgument(vertexName != null, "VertexName must be specified");
Objects.requireNonNull(vertexName, "VertexName must be specified");
DAG dag = getDag();
Vertex vertex = dag.getVertex(vertexName);
return vertex.getCompletedTasks();
}

@Override
public int getVertexRunningTaskCount(String vertexName) {
Preconditions.checkArgument(vertexName != null, "VertexName must be specified");
Objects.requireNonNull(vertexName, "VertexName must be specified");
DAG dag = getDag();
Vertex vertex = dag.getVertex(vertexName);
return vertex.getRunningTasks();
}

@Override
public long getFirstAttemptStartTime(String vertexName, int taskIndex) {
Preconditions.checkArgument(vertexName != null, "VertexName must be specified");
Objects.requireNonNull(vertexName, "VertexName must be specified");
Preconditions.checkArgument(taskIndex >=0, "TaskIndex must be > 0");
DAG dag = getDag();
Vertex vertex = dag.getVertex(vertexName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,6 @@ public VertexManager(VertexManagerPluginDescriptor pluginDesc, UserGroupInformat
this.rootInputInitEventQueue = new LinkedBlockingQueue<TezEvent>();

pluginContext = new VertexManagerPluginContextImpl();
Preconditions.checkArgument(pluginDesc != null);
payload = pluginDesc.getUserPayload();
pluginFailed = new AtomicBoolean(false);
plugin = ReflectionUtils.createClazzInstance(pluginDesc.getClassName(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import java.util.Objects;

import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.tez.common.Preconditions;

/**
* ExtendedNodeId extends NodeId with unique identifier in addition to hostname and port.
Expand All @@ -33,8 +32,7 @@ public class ExtendedNodeId extends NodeId {
private final String uniqueIdentifier;

public ExtendedNodeId(NodeId nodeId, String uniqueIdentifier) {
Preconditions.checkArgument(nodeId != null);
this.nodeId = nodeId;
this.nodeId = Objects.requireNonNull(nodeId);
this.uniqueIdentifier = uniqueIdentifier == null ? "" : uniqueIdentifier.trim();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.hadoop.mapred.split;

import java.io.IOException;
import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -66,14 +67,12 @@ public void setInputFormat(InputFormat<K, V> wrappedInputFormat) {
}

public void setSplitSizeEstimator(SplitSizeEstimator estimator) {
Preconditions.checkArgument(estimator != null);
this.estimator = estimator;
this.estimator = Objects.requireNonNull(estimator);
LOG.debug("Split size estimator : {}", estimator);
}

public void setSplitLocationProvider(SplitLocationProvider locationProvider) {
Preconditions.checkArgument(locationProvider != null);
this.locationProvider = locationProvider;
this.locationProvider = Objects.requireNonNull(locationProvider);
LOG.debug("Split size location provider: {}", locationProvider);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,9 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import com.google.common.base.Function;
import org.apache.tez.common.Preconditions;
import com.google.common.collect.Lists;
import org.apache.tez.mapreduce.grouper.GroupedSplitContainer;
import org.apache.tez.mapreduce.grouper.MapredSplitContainer;
Expand Down Expand Up @@ -66,7 +66,7 @@ public InputSplit[] getGroupedSplits(Configuration conf,
public InputSplit[] getGroupedSplits(Configuration conf,
InputSplit[] originalSplits, int desiredNumSplits,
String wrappedInputFormatName, SplitSizeEstimator estimator, SplitLocationProvider locationProvider) throws IOException {
Preconditions.checkArgument(originalSplits != null, "Splits must be specified");
Objects.requireNonNull(originalSplits, "Splits must be specified");

List<SplitContainer> originalSplitContainers = Lists.transform(Arrays.asList(originalSplits),
new Function<InputSplit, SplitContainer>() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -74,14 +75,12 @@ public void setDesiredNumberOfSplits(int num) {
}

public void setSplitSizeEstimator(SplitSizeEstimator estimator) {
Preconditions.checkArgument(estimator != null);
this.estimator = estimator;
this.estimator = Objects.requireNonNull(estimator);
LOG.debug("Split size estimator : {}", estimator);
}

public void setSplitLocationProvider(SplitLocationProvider locationProvider) {
Preconditions.checkArgument(locationProvider != null);
this.locationProvider = locationProvider;
this.locationProvider = Objects.requireNonNull(locationProvider);
LOG.debug("Split location provider : {}", locationProvider);
}

Expand Down
Loading

0 comments on commit 41cbc17

Please sign in to comment.