Commit
HDDS-2035 Implement datanode level CLI to reveal pipeline relation. (a…
timmylicheng committed Feb 12, 2020
1 parent 90f794f commit e720e7a
Showing 4 changed files with 200 additions and 0 deletions.
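The diff wires a new datanode command group into the SCM CLI and gives it a single list action that cross-references each datanode against the pipelines known to SCM. Assuming the usual ozone scmcli entry point (the wrapper script itself is not part of this diff), the command would be invoked roughly as:

    ozone scmcli datanode list
    ozone scmcli datanode list --ip <ip-address>
    ozone scmcli datanode list --id <datanode-uuid>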
org/apache/hadoop/hdds/scm/cli/SCMCLI.java
@@ -29,6 +29,7 @@
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.cli.container.ContainerCommands;
import org.apache.hadoop.hdds.scm.cli.datanode.DatanodeCommands;
import org.apache.hadoop.hdds.scm.cli.pipeline.PipelineCommands;
import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
import org.apache.hadoop.hdds.scm.client.ScmClient;
@@ -59,6 +60,7 @@
        SafeModeCommands.class,
        ContainerCommands.class,
        PipelineCommands.class,
        DatanodeCommands.class,
        TopologySubcommand.class,
        ReplicationManagerCommands.class
    },
org/apache/hadoop/hdds/scm/cli/datanode/DatanodeCommands.java
@@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.scm.cli.datanode;

import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.cli.MissingSubcommandException;
import org.apache.hadoop.hdds.scm.cli.SCMCLI;
import picocli.CommandLine;

import java.util.concurrent.Callable;

/**
* Subcommand for datanode related operations.
*/
@CommandLine.Command(
    name = "datanode",
    description = "Datanode specific operations",
    mixinStandardHelpOptions = true,
    versionProvider = HddsVersionProvider.class,
    subcommands = {
        ListInfoSubcommand.class
    })
public class DatanodeCommands implements Callable<Void> {

  @CommandLine.ParentCommand
  private SCMCLI parent;

  public SCMCLI getParent() {
    return parent;
  }

  @Override
  public Void call() throws Exception {
    throw new MissingSubcommandException(
        this.parent.getCmd().getSubcommands().get("datanode"));
  }
}
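Calling the datanode group without a subcommand throws MissingSubcommandException, which is intended to surface as the usage text for the group rather than a stack trace, following the pattern of the existing container and pipeline command groups.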
org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
@@ -0,0 +1,124 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds.scm.cli.datanode;

import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.client.ScmClient;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import picocli.CommandLine;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
* Handler of list datanodes info command.
*/
@CommandLine.Command(
    name = "list",
    description = "List info of datanodes",
    mixinStandardHelpOptions = true,
    versionProvider = HddsVersionProvider.class)
public class ListInfoSubcommand implements Callable<Void> {

  @CommandLine.ParentCommand
  private DatanodeCommands parent;

  @CommandLine.Option(names = {"--ip"},
      description = "Show info by ip address.",
      defaultValue = "",
      required = false)
  private String ipaddress;

  @CommandLine.Option(names = {"--id"},
      description = "Show info by datanode UUID.",
      defaultValue = "",
      required = false)
  private String uuid;

  private List<Pipeline> pipelines;

  @Override
  public Void call() throws Exception {
    try (ScmClient scmClient = parent.getParent().createScmClient()) {
      pipelines = scmClient.listPipelines();
      if (isNullOrEmpty(ipaddress) && isNullOrEmpty(uuid)) {
        getAllNodes(scmClient).stream().forEach(p -> printDatanodeInfo(p));
      } else {
        Stream<DatanodeDetails> allNodes = getAllNodes(scmClient).stream();
        if (!isNullOrEmpty(ipaddress)) {
          allNodes = allNodes.filter(p -> p.getIpAddress()
              .compareToIgnoreCase(ipaddress) == 0);
        }
        if (!isNullOrEmpty(uuid)) {
          allNodes = allNodes.filter(p -> p.getUuid().toString().equals(uuid));
        }
        allNodes.forEach(p -> printDatanodeInfo(p));
      }
      return null;
    }
  }

  private List<DatanodeDetails> getAllNodes(ScmClient scmClient)
      throws IOException {
    List<HddsProtos.Node> nodes = scmClient.queryNode(
        HddsProtos.NodeState.HEALTHY, HddsProtos.QueryScope.CLUSTER, "");

    return nodes.stream()
        .map(p -> DatanodeDetails.getFromProtoBuf(p.getNodeID()))
        .collect(Collectors.toList());
  }

  private void printDatanodeInfo(DatanodeDetails datanode) {
    StringBuilder pipelineListInfo = new StringBuilder();
    int relatedPipelineNum = 0;
    if (!pipelines.isEmpty()) {
      List<Pipeline> relatedPipelines = pipelines.stream().filter(
          p -> p.getNodes().contains(datanode)).collect(Collectors.toList());
      if (relatedPipelines.isEmpty()) {
        pipelineListInfo.append("No related pipelines" +
            " or the node is not in Healthy state.");
      } else {
        relatedPipelineNum = relatedPipelines.size();
        relatedPipelines.stream().forEach(
            p -> pipelineListInfo.append(p.getId().getId().toString())
                .append("/").append(p.getFactor().toString()).append("/")
                .append(p.getType().toString()).append("/")
                .append(p.getPipelineState().toString()).append("/")
                .append(datanode.getUuid().equals(p.getLeaderId()) ?
                    "Leader" : "Follower")
                .append(System.getProperty("line.separator")));
      }
    } else {
      pipelineListInfo.append("No pipelines in cluster.");
    }
    System.out.println("Datanode: " + datanode.getUuid().toString() +
        " (" + datanode.getIpAddress() + "/"
        + datanode.getHostName() + "/" + relatedPipelineNum +
        " pipelines) \n" + "Related pipelines: \n" + pipelineListInfo);
  }

  protected static boolean isNullOrEmpty(String str) {
    return ((str == null) || str.trim().isEmpty());
  }
}
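For reference, printDatanodeInfo above emits one block per datanode: a header with the UUID, IP, hostname and pipeline count, then one line per related pipeline in id/factor/type/state/role form. Illustrative output (placeholder values, not captured from a real cluster):

    Datanode: <datanode-uuid> (<ip>/<hostname>/2 pipelines)
    Related pipelines:
    <pipeline-uuid>/THREE/RATIS/OPEN/Leader
    <pipeline-uuid>/ONE/RATIS/OPEN/Follower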
org/apache/hadoop/hdds/scm/cli/datanode/package-info.java
@@ -0,0 +1,22 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
* Contains all of the datanode related scm commands.
*/
package org.apache.hadoop.hdds.scm.cli.datanode;
