
Merge pull request #17 from genepi/feature-yarn-support
Feature yarn support
lukfor committed Nov 27, 2018
2 parents cc3ce97 + 6b624e6 commit d0e4ffa
Showing 55 changed files with 294 additions and 519 deletions.
5 changes: 4 additions & 1 deletion files/cloudgene
@@ -5,8 +5,11 @@
 # The path to the folder containing MyDaemon.jar
 FILE_PATH=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
 
+# Hadoop path with all jars
+HADOOP_CLASS_PATH=`hadoop classpath`
+
 # Our classpath including our jar file
-CLASS_PATH="$FILE_PATH/cloudgene.jar"
+CLASS_PATH="$FILE_PATH/cloudgene.jar:$HADOOP_CLASS_PATH"
 
 # The fully qualified name of the class to execute
 CLASS="cloudgene.mapred.CommandLineInterface"
78 changes: 73 additions & 5 deletions pom.xml
@@ -3,7 +3,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>cloudgene</groupId>
 	<artifactId>cloudgene</artifactId>
-	<version>2.0.0-rc1</version>
+	<version>2.0.0-rc2</version>
 	<repositories>
 		<repository>
 			<id>maven-restlet</id>
@@ -215,7 +215,7 @@
 		<dependency>
 			<groupId>genepi</groupId>
 			<artifactId>genepi-hadoop</artifactId>
-			<version>mr1-1.2.2</version>
+			<version>mr1-1.2.4</version>
 			<exclusions>
 				<exclusion>
 					<groupId>com.sun.jersey</groupId>
@@ -225,6 +225,10 @@
 					<groupId>com.sun.jersey</groupId>
 					<artifactId>jersey-server</artifactId>
 				</exclusion>
+				<exclusion>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-client</artifactId>
+				</exclusion>
 			</exclusions>
 		</dependency>

@@ -298,7 +302,7 @@
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-hdfs</artifactId>
 			<version>2.6.0-cdh5.10.0</version>
-			<scope>compile</scope>
+			<scope>test</scope>
 			<type>test-jar</type>
 			<exclusions>
 				<exclusion>
@@ -311,12 +315,68 @@
 				</exclusion>
 			</exclusions>
 		</dependency>
 
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-hdfs</artifactId>
+			<version>2.6.0-cdh5.10.0</version>
+			<scope>test</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.apache.httpcomponents</groupId>
+					<artifactId>httpcore</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>net.java.dev.jets3t</groupId>
+					<artifactId>jets3t</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jersey</groupId>
+					<artifactId>jersey-core</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jersey</groupId>
+					<artifactId>jersey-server</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jersey</groupId>
+					<artifactId>jersey-json</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-common</artifactId>
 			<version>2.6.0-cdh5.10.0</version>
-			<scope>compile</scope>
+			<scope>test</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.apache.httpcomponents</groupId>
+					<artifactId>httpcore</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>net.java.dev.jets3t</groupId>
+					<artifactId>jets3t</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jersey</groupId>
+					<artifactId>jersey-core</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jersey</groupId>
+					<artifactId>jersey-server</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jersey</groupId>
+					<artifactId>jersey-json</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-common</artifactId>
+			<version>2.6.0-cdh5.10.0</version>
+			<scope>test</scope>
 			<type>test-jar</type>
 			<exclusions>
 				<exclusion>
@@ -352,6 +412,7 @@
 					<artifactId>jets3t</artifactId>
 				</exclusion>
 			</exclusions>
+			<scope>test</scope>
 		</dependency>
 
 		<dependency>
@@ -405,6 +466,12 @@
 			<version>1.2.14</version>
 		</dependency>
 
+		<dependency>
+			<groupId>commons-io</groupId>
+			<artifactId>commons-io</artifactId>
+			<version>2.4</version>
+		</dependency>
+
 		<dependency>
 			<groupId>com.esotericsoftware.yamlbeans</groupId>
 			<artifactId>yamlbeans</artifactId>
@@ -471,6 +538,7 @@
 			<groupId>dumbster</groupId>
 			<artifactId>dumbster</artifactId>
 			<version>1.6</version>
+			<scope>test</scope>
 		</dependency>
 
 		<dependency>
31 changes: 15 additions & 16 deletions src/main/html/webapp/components/core/dashboard/dashboard.js
@@ -8,24 +8,23 @@ export default Control.extend({
 
 	"init": function(element, options) {
 		var url = 'static/home.stache';
-		fetch(url).then(function(response) {
-			return response.text();
-		}).then(function(data) {
+		$.get(url,
+			function(data) {
 
-			var template = stache(data);
+				var template = stache(data);
 
-			Counter.findOne({}, function(counter) {
-				$(element).html(template({
-					counter: counter,
-					loggedIn: options.appState.loggedIn
-				}));
-			}, function(message) {
-				$(element).html(template({
-					counter: undefined,
-					loggedIn: options.loggedIn
-				}));
-			});
+				Counter.findOne({}, function(counter) {
+					$(element).html(template({
+						counter: counter,
+						loggedIn: options.appState.loggedIn
+					}));
+				}, function(message) {
+					$(element).html(template({
+						counter: undefined,
+						loggedIn: options.loggedIn
+					}));
+				});
 
-		});
+			});
 	}
 });
2 changes: 1 addition & 1 deletion src/main/html/webapp/helpers/router.js
@@ -90,7 +90,7 @@ export default Control.extend({
 		} else {
 			view.addClass(this.options.classes);
 		}
-		view.html('Loading...');
+		view.html('');
 		this.element.append(view);
 		new Control(view[0], data);

27 changes: 13 additions & 14 deletions src/main/html/webapp/helpers/static-page.js
@@ -9,23 +9,22 @@ export default Control.extend({
 	"init": function(element, options) {
 		try {
 
-			if (options.page){
+			if (options.page) {
 				options.template = "static/" + options.page + ".stache";
 			}
 
-			fetch(options.template).then(function(response) {
-				return response.text();
-			}).then(function(data) {
-				var view = stache(data);
-				if (view) {
-					$(element).html(view());
-				} else {
-					new ErrorPage(element, {
-						status: "404",
-						message: "Oops, Sorry We Can't Find That Page!"
-					});
-				}
-			});
+			$.get(options.template,
+				function(data) {
+					var view = stache(data);
+					if (view) {
+						$(element).html(view());
+					} else {
+						new ErrorPage(element, {
+							status: "404",
+							message: "Oops, Sorry We Can't Find That Page!"
+						});
+					}
+				});
 
 		} catch (e) {
 			new ErrorPage(element, {
2 changes: 1 addition & 1 deletion src/main/html/webapp/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion src/main/html/webapp/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "Cloudgene",
-	"version": "2.0.0-rc1",
+	"version": "2.0.0-rc2",
 	"dependencies": {
 		"@fortawesome/fontawesome-free": "5.3.1",
 		"bootbox": "4.4.0",
10 changes: 5 additions & 5 deletions src/main/java/cloudgene/mapred/Main.java
@@ -37,7 +37,7 @@
 
 public class Main implements Daemon {
 
-	public static final String VERSION = "2.0.0-rc1";
+	public static final String VERSION = "2.0.0-rc2";
 
 	private Database database;

@@ -71,17 +71,17 @@ public void runCloudgene(Settings settings, String[] args) throws Exception {
 		settings.checkTechnologies();
 
 		// configure logger
-		if (new File("config/log4j.properties").exists()) {
+		if (new File("log4j.properties").exists()) {
 
-			PropertyConfigurator.configure("config/log4j.properties");
+			PropertyConfigurator.configure("log4j.properties");
 
 			Slf4jLoggerFacade loggerFacade = new Slf4jLoggerFacade();
 			Engine.getInstance().setLoggerFacade(loggerFacade);
 
 		} else {
 
-			if (new File("log4j.properties").exists()) {
-				PropertyConfigurator.configure("log4j.properties");
+			if (new File("config/log4j.properties").exists()) {
+				PropertyConfigurator.configure("config/log4j.properties");
 
 				Slf4jLoggerFacade loggerFacade = new Slf4jLoggerFacade();
 				Engine.getInstance().setLoggerFacade(loggerFacade);
8 changes: 5 additions & 3 deletions src/main/java/cloudgene/mapred/api/v2/admin/ArchiveJob.java
@@ -57,12 +57,14 @@ public Representation get() {
 
 		// delete local directory and hdfs directory
 		String localOutput = FileUtil.path(settings.getLocalWorkspace(), job.getId());
-		FileUtil.deleteDirectory(localOutput);
 
+		try {
 			String hdfsOutput = HdfsUtil.makeAbsolute(HdfsUtil.path(settings.getHdfsWorkspace(), job.getId()));
 
+			FileUtil.deleteDirectory(localOutput);
 			HdfsUtil.delete(hdfsOutput);
 
+		}catch (NoClassDefFoundError e) {
+
+		}
 		job.setState(AbstractJob.STATE_RETIRED);
 		dao.update(job);

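The guard above works because a missing Hadoop runtime surfaces as a NoClassDefFoundError (an Error, not an Exception) the first time an HdfsUtil method is touched, so it must be caught explicitly. A minimal, self-contained sketch of that pattern — the helper and method names here are illustrative, not part of this commit:

	// Hypothetical helper illustrating the guard used in ArchiveJob:
	// run a task that may reference Hadoop classes, and treat missing
	// Hadoop jars (NoClassDefFoundError) as "feature not available".
	public final class OptionalHadoop {

		private OptionalHadoop() {
		}

		// Returns true if the task ran, false if Hadoop was absent.
		public static boolean runIfAvailable(Runnable hadoopTask) {
			try {
				hadoopTask.run();
				return true;
			} catch (NoClassDefFoundError e) {
				// Hadoop jars not on the classpath: skip the task.
				return false;
			}
		}
	}

For example, the HDFS cleanup could be wrapped as OptionalHadoop.runIfAvailable(() -> HdfsUtil.delete(hdfsOutput)), so archiving still succeeds on installations without Hadoop.
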
@@ -8,7 +8,6 @@
 import java.util.jar.Attributes;
 import java.util.jar.Manifest;
 
-import org.apache.hadoop.mapred.ClusterStatus;
 import org.restlet.data.MediaType;
 import org.restlet.data.Status;
 import org.restlet.representation.Representation;
@@ -23,9 +22,9 @@
 import cloudgene.mapred.Main;
 import cloudgene.mapred.core.User;
 import cloudgene.mapred.util.BaseResource;
-import cloudgene.mapred.util.HadoopCluster;
 import cloudgene.mapred.util.RBinary;
 import cloudgene.mapred.util.Technology;
+import genepi.hadoop.HadoopCluster;
 import genepi.hadoop.HadoopUtil;
 import net.sf.json.JSONObject;

@@ -90,31 +89,30 @@ public Representation get() {
 				object.put("hadoop_safemode", false);
 			}
 
-			ClusterStatus cluster = HadoopUtil.getInstance().getClusterDetails();
 			StringBuffer state = new StringBuffer();
 			state.append("JobTracker: " + HadoopCluster.getJobTracker() + "\n");
 			state.append("Default FS: " + HadoopCluster.getDefaultFS() + "\n");
-			state.append("State: " + cluster.getJobTrackerStatus().toString() + "\n");
-			state.append("MapTask: " + cluster.getMaxMapTasks() + "\n");
-			state.append("ReduceTask: " + cluster.getMaxReduceTasks() + "\n");
+			state.append("State: " + HadoopCluster.getJobTrackerStatus().toString() + "\n");
+			state.append("MapTask: " + HadoopCluster.getMaxMapTasks() + "\n");
+			state.append("ReduceTask: " + HadoopCluster.getMaxReduceTasks() + "\n");
 			state.append("Nodes\n");
-			for (String tracker : cluster.getActiveTrackerNames()) {
+			for (String tracker : HadoopCluster.getActiveTrackerNames()) {
 				state.append(" " + tracker + "\n");
 			}
 			state.append("Blacklist:\n");
-			for (String tracker : cluster.getBlacklistedTrackerNames()) {
+			for (String tracker : HadoopCluster.getBlacklistedTrackerNames()) {
 				state.append(" " + tracker + "\n");
 			}
 			object.put("hadoop_details", state.toString());
 			object.put("hadoop_enabled", true);
 			object.put("hadoop_jobtracker", HadoopCluster.getJobTracker());
 			object.put("hadoop_hdfs", HadoopCluster.getDefaultFS());
-			object.put("hadoop_map_tasks", cluster.getMaxMapTasks());
-			object.put("hadoop_reduce_tasks", cluster.getMaxReduceTasks());
-			object.put("hadoop_active_nodes", cluster.getActiveTrackerNames().size());
-			object.put("hadoop_inactive_nodes", cluster.getBlacklistedTrackerNames().size());
+			object.put("hadoop_map_tasks", HadoopCluster.getMaxMapTasks());
+			object.put("hadoop_reduce_tasks", HadoopCluster.getMaxReduceTasks());
+			object.put("hadoop_active_nodes", HadoopCluster.getActiveTrackerNames().size());
+			object.put("hadoop_inactive_nodes", HadoopCluster.getBlacklistedTrackerNames().size());
 			object.put("hadoop_nodes",
-					cluster.getActiveTrackerNames().size() + cluster.getBlacklistedTrackerNames().size());
+					HadoopCluster.getActiveTrackerNames().size() + HadoopCluster.getBlacklistedTrackerNames().size());
 		} catch (Exception e) {
 			object.put("hadoop_enabled", false);
 			object.put("hadoop_error", "Hadoop cluster is unreachable");
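All Hadoop details above now go through the static genepi.hadoop.HadoopCluster accessors instead of an MR1 ClusterStatus object, which is what lets the endpoint report on YARN clusters as well. A minimal sketch of probing a cluster the same way, assuming genepi-hadoop is on the classpath (the class name ClusterProbe is illustrative, not part of the commit):

	import genepi.hadoop.HadoopCluster;

	// Prints a short cluster summary using the same static accessors as
	// the resource above; any failure, including missing Hadoop jars, is
	// reported as unreachable, mirroring the hadoop_enabled=false fallback.
	public class ClusterProbe {

		public static void main(String[] args) {
			try {
				System.out.println("JobTracker: " + HadoopCluster.getJobTracker());
				System.out.println("Default FS: " + HadoopCluster.getDefaultFS());
				System.out.println("MapTask: " + HadoopCluster.getMaxMapTasks());
				System.out.println("ReduceTask: " + HadoopCluster.getMaxReduceTasks());
				System.out.println("Active nodes: " + HadoopCluster.getActiveTrackerNames().size());
				System.out.println("Blacklisted nodes: " + HadoopCluster.getBlacklistedTrackerNames().size());
			} catch (Exception | NoClassDefFoundError e) {
				System.out.println("Hadoop cluster is unreachable");
			}
		}
	}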
