Revert "YARN=5181. ClusterNodeTracker: add method to get list of node…
Browse files Browse the repository at this point in the history
…s matching a specific resourceName. (kasha via asuresh)"

This reverts commit e905a42.
xslogic committed Jul 19, 2016
1 parent dc2f4b6 commit 5f2d33a
Showing 2 changed files with 11 additions and 114 deletions.
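For orientation: the revert swaps the per-rack bookkeeping in ClusterNodeTracker back from per-rack node lists to plain counters and drops the name-to-node lookup map that YARN-5181 introduced. Below is a minimal, self-contained sketch of the counter-based bookkeeping the diff that follows restores; the class name and method shapes here are illustrative stand-ins, not the actual Hadoop code.

import java.util.HashMap;
import java.util.Map;

/** Illustrative sketch: per-rack node counting as restored by this revert. */
public class RackCountSketch {
  // After the revert, only a count per rack is kept (no per-rack node lists).
  private final Map<String, Integer> nodesPerRack = new HashMap<>();

  /** Increment the counter for the rack a node joins. */
  public void addNode(String rackName) {
    Integer numNodes = nodesPerRack.get(rackName);
    if (numNodes == null) {
      numNodes = 0;
    }
    nodesPerRack.put(rackName, numNodes + 1);
  }

  /** Decrement the counter when a node leaves; guard against underflow. */
  public void removeNode(String rackName) {
    Integer numNodes = nodesPerRack.get(rackName);
    if (numNodes != null && numNodes > 0) {
      nodesPerRack.put(rackName, numNodes - 1);
    }
  }

  /** Number of nodes currently tracked for the given rack. */
  public int nodeCount(String rackName) {
    Integer count = nodesPerRack.get(rackName);
    return count == null ? 0 : count;
  }

  public static void main(String[] args) {
    RackCountSketch tracker = new RackCountSketch();
    tracker.addNode("/rack1");
    tracker.addNode("/rack1");
    tracker.removeNode("/rack1");
    System.out.println(tracker.nodeCount("/rack1")); // prints 1
  }
}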
ClusterNodeTracker.java (org.apache.hadoop.yarn.server.resourcemanager.scheduler)

@@ -18,13 +18,11 @@
 
 package org.apache.hadoop.yarn.server.resourcemanager.scheduler;
 
-import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.util.resource.Resources;
 
@@ -52,8 +50,7 @@ public class ClusterNodeTracker<N extends SchedulerNode> {
   private Lock writeLock = readWriteLock.writeLock();
 
   private HashMap<NodeId, N> nodes = new HashMap<>();
-  private Map<String, N> nodeNameToNodeMap = new HashMap<>();
-  private Map<String, List<N>> nodesPerRack = new HashMap<>();
+  private Map<String, Integer> nodesPerRack = new HashMap<>();
 
   private Resource clusterCapacity = Resources.clone(Resources.none());
   private Resource staleClusterCapacity = null;
@@ -69,16 +66,14 @@ public void addNode(N node) {
     writeLock.lock();
     try {
       nodes.put(node.getNodeID(), node);
-      nodeNameToNodeMap.put(node.getNodeName(), node);
 
       // Update nodes per rack as well
       String rackName = node.getRackName();
-      List<N> nodesList = nodesPerRack.get(rackName);
-      if (nodesList == null) {
-        nodesList = new ArrayList<>();
-        nodesPerRack.put(rackName, nodesList);
+      Integer numNodes = nodesPerRack.get(rackName);
+      if (numNodes == null) {
+        numNodes = 0;
       }
-      nodesList.add(node);
+      nodesPerRack.put(rackName, ++numNodes);
 
       // Update cluster capacity
       Resources.addTo(clusterCapacity, node.getTotalResource());
@@ -131,8 +126,8 @@ public int nodeCount(String rackName) {
     readLock.lock();
     String rName = rackName == null ? "NULL" : rackName;
     try {
-      List<N> nodesList = nodesPerRack.get(rName);
-      return nodesList == null ? 0 : nodesList.size();
+      Integer nodeCount = nodesPerRack.get(rName);
+      return nodeCount == null ? 0 : nodeCount;
     } finally {
       readLock.unlock();
     }
@@ -159,18 +154,14 @@ public N removeNode(NodeId nodeId) {
         LOG.warn("Attempting to remove a non-existent node " + nodeId);
         return null;
       }
-      nodeNameToNodeMap.remove(node.getNodeName());
 
       // Update nodes per rack as well
       String rackName = node.getRackName();
-      List<N> nodesList = nodesPerRack.get(rackName);
-      if (nodesList == null) {
-        LOG.error("Attempting to remove node from an empty rack " + rackName);
+      Integer numNodes = nodesPerRack.get(rackName);
+      if (numNodes > 0) {
+        nodesPerRack.put(rackName, --numNodes);
       } else {
-        nodesList.remove(node);
-        if (nodesList.isEmpty()) {
-          nodesPerRack.remove(rackName);
-        }
+        LOG.error("Attempting to remove node from an empty rack " + rackName);
       }
 
       // Update cluster capacity
@@ -306,29 +297,4 @@ public List<N> sortedNodeList(Comparator<N> comparator) {
     Collections.sort(sortedList, comparator);
     return sortedList;
   }
-
-  /**
-   * Convenience method to return list of nodes corresponding to resourceName
-   * passed in the {@link ResourceRequest}.
-   *
-   * @param resourceName Host/rack name of the resource, or
-   *                     {@link ResourceRequest#ANY}
-   * @return list of nodes that match the resourceName
-   */
-  public List<N> getNodesByResourceName(final String resourceName) {
-    Preconditions.checkArgument(
-        resourceName != null && !resourceName.isEmpty());
-    List<N> retNodes = new ArrayList<>();
-    if (ResourceRequest.ANY.equals(resourceName)) {
-      return getAllNodes();
-    } else if (nodeNameToNodeMap.containsKey(resourceName)) {
-      retNodes.add(nodeNameToNodeMap.get(resourceName));
-    } else if (nodesPerRack.containsKey(resourceName)) {
-      return nodesPerRack.get(resourceName);
-    } else {
-      LOG.info(
-          "Could not find a node matching given resourceName " + resourceName);
-    }
-    return retNodes;
-  }
 }
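The functionality this revert removes is the three-way resource-name resolution in getNodesByResourceName: ANY resolves to all nodes, then an exact host match, then a rack match, otherwise nothing. The following is a self-contained illustrative sketch of that lookup order using stand-in names and plain String node identifiers; it is not the Hadoop code or the YARN API.

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Illustrative sketch of the lookup order in the removed getNodesByResourceName():
 * "*" (ANY) -> all nodes, then exact host name, then rack name, else empty.
 */
public class ResourceNameLookupSketch {
  private static final String ANY = "*"; // stand-in for ResourceRequest.ANY

  private final Map<String, String> nodeNameToNode = new HashMap<>();      // host -> node id
  private final Map<String, List<String>> nodesPerRack = new HashMap<>();  // rack -> node ids

  public void addNode(String host, String rack) {
    nodeNameToNode.put(host, host);
    nodesPerRack.computeIfAbsent(rack, r -> new ArrayList<>()).add(host);
  }

  public List<String> getNodesByResourceName(String resourceName) {
    if (ANY.equals(resourceName)) {
      return new ArrayList<>(nodeNameToNode.values());                     // every node
    } else if (nodeNameToNode.containsKey(resourceName)) {
      return Collections.singletonList(nodeNameToNode.get(resourceName));  // exact host
    } else if (nodesPerRack.containsKey(resourceName)) {
      return nodesPerRack.get(resourceName);                               // all nodes on the rack
    }
    return Collections.emptyList();                                        // no match
  }

  public static void main(String[] args) {
    ResourceNameLookupSketch t = new ResourceNameLookupSketch();
    t.addNode("host1", "/rack1");
    t.addNode("host2", "/rack1");
    System.out.println(t.getNodesByResourceName("*"));      // both hosts (order not guaranteed)
    System.out.println(t.getNodesByResourceName("host1"));  // [host1]
    System.out.println(t.getNodesByResourceName("/rack1")); // [host1, host2]
  }
}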

The second changed file was deleted by this revert; its contents are not shown here.
