Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

first commit

  • Loading branch information...
commit f57522191ae3e85fb7e5b930d0ed0917f6747f67 1 parent bb76ec4
Jarred Li leejianwei authored
Showing with 4,259 additions and 1 deletion.
  1. +9 −0 .gitignore
  2. +16 −0 CLI/.gitignore
  3. +14 −0 CLI/.springBeans
  4. +24 −0 CLI/README.txt
  5. +32 −0 CLI/build.gradle
  6. +6 −0 CLI/gradle.properties
  7. +132 −0 CLI/pom.xml
  8. +100 −0 CLI/spring-hadoop-admin.log
  9. +1 −0  CLI/spring-hadoop-admin.properties
  10. +116 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/BaseCommand.java
  11. +142 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/ExecutionsCommand.java
  12. +42 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/InfoCommand.java
  13. +73 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/JobsCommand.java
  14. +41 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/TargetCommand.java
  15. +73 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/plugin/SpringHadoopAdminBannerProvider.java
  16. +42 −0 ...rc/main/java/org/springframework/data/hadoop/admin/cli/plugin/SpringHadoopAdminHistoryFileNameProvider.java
  17. +43 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/plugin/SpringHadoopAdminPromptProvider.java
  18. +35 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/util/Log.java
  19. +59 −0 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/util/PropertyUtil.java
  20. +12 −0 CLI/src/main/resources/META-INF/spring/spring-shell-plugin.xml
  21. +23 −0 CLI/src/main/resources/rest-context.xml
  22. +27 −1 README.md
  23. +60 −0 build.gradle
  24. +66 −0 context.xml
  25. +20 −0 gradle.properties
  26. +6 −0 gradle/wrapper/gradle-wrapper.properties
  27. +164 −0 gradlew
  28. +90 −0 gradlew.bat
  29. +168 −0 pom.xml
  30. +8 −0 samples/hive/hadoop.properties
  31. +69 −0 samples/hive/hive-context.xml
  32. +3 −0  samples/hive/script.sql
  33. +8 −0 samples/pig/hadoop.properties
  34. +45 −0 samples/pig/pig-context.xml
  35. +4 −0 samples/pig/script.pig
  36. +14 −0 samples/wordcount-batch-notification/.springBeans
  37. +37 −0 samples/wordcount-batch-notification/build.gradle
  38. +7 −0 samples/wordcount-batch-notification/gradle.properties
  39. +8 −0 samples/wordcount-batch-notification/hadoop.properties
  40. +91 −0 ...ount-batch-notification/src/main/java/org/springframework/data/hadoop/admin/examples/EmailNotification.java
  41. +49 −0 ...-batch-notification/src/test/java/org/springframework/data/hadoop/admin/examples/EmailNotificationTest.java
  42. +30 −0 samples/wordcount-batch-notification/src/test/resources/mail-context.xml
  43. +93 −0 samples/wordcount-batch-notification/wordcount-notification-context.xml
  44. +8 −0 samples/wordcount-batch/hadoop.properties
  45. +74 −0 samples/wordcount-batch/wordcount-context.xml
  46. +8 −0 samples/wordcount-cron/hadoop.properties
  47. +65 −0 samples/wordcount-cron/wordcount-withoutjob-context.xml
  48. +41 −0 src/main/java/org/springframework/data/hadoop/admin/SpringHadoopAdminException.java
  49. +41 −0 src/main/java/org/springframework/data/hadoop/admin/SpringHadoopAdminWorkflowException.java
  50. +171 −0 src/main/java/org/springframework/data/hadoop/admin/util/HadoopWorkflowDescriptorUtils.java
  51. +308 −0 src/main/java/org/springframework/data/hadoop/admin/util/HadoopWorkflowUtils.java
  52. +41 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/HadoopWorkflowDirectory.java
  53. +61 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/HadoopWorkflowDirectoryFilter.java
  54. +176 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/HadoopWorkflowLaunchRequestAdapter.java
  55. +90 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/HadoopWorkflowRemoveRequestAdapter.java
  56. +63 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/SimpleSpringHadoopTasklet.java
  57. +278 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/support/FileSystemApplicationContextFactory.java
  58. +172 −0 ...n/java/org/springframework/data/hadoop/admin/workflow/support/FileSystemApplicationContextsFactoryBean.java
  59. +107 −0 ...main/java/org/springframework/data/hadoop/admin/workflow/support/FileSystemWorkflowResourceFactoryBean.java
  60. +68 −0 src/main/java/org/springframework/data/hadoop/admin/workflow/support/WorkflowArtifacts.java
  61. +31 −0 ...ain/resources/META-INF/spring/batch/bootstrap/spring-hadoop-admin/spring-hadoop-admin-execution-context.xml
  62. +47 −0 src/main/resources/META-INF/spring/batch/jobs/spring-hadoop-admin-context.xml
  63. +16 −0 src/main/resources/batch-hsql.properties
  64. +3 −0  src/main/resources/core-site.xml
  65. +7 −0 src/main/resources/log4j.properties
  66. +51 −0 src/main/webapp/WEB-INF/web.xml
  67. +1 −0  src/main/webapp/index.jsp
  68. +44 −0 src/test/java/org/springframework/data/hadoop/admin/util/ClassLoaderTest.java
  69. +100 −0 src/test/java/org/springframework/data/hadoop/admin/util/HadoopWorkflowDescriptorUtilsTest.java
  70. +61 −0 .../java/org/springframework/data/hadoop/admin/workflow/support/FileSystemWorkflowResourceFactoryBeanTest.java
  71. +9 −0 src/test/resources/context.xml
  72. +8 −0 src/test/resources/hadoop.properties
  73. +7 −0 src/test/resources/log4j.properties
9 .gitignore
View
@@ -4,3 +4,12 @@
*.jar
*.war
*.ear
+.classpath
+.project
+.gradle
+.settings
+bin
+build
+target
+
+
16 CLI/.gitignore
View
@@ -0,0 +1,16 @@
+*.class
+
+# Package Files #
+*.jar
+*.war
+*.ear
+.classpath
+.project
+.gradle
+.settings
+bin
+build
+target
+.springBeans
+
+
14 CLI/.springBeans
View
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beansProjectDescription>
+ <version>1</version>
+ <pluginVersion><![CDATA[2.9.1.201203220057-RELEASE]]></pluginVersion>
+ <configSuffixes>
+ <configSuffix><![CDATA[xml]]></configSuffix>
+ </configSuffixes>
+ <enableImports><![CDATA[false]]></enableImports>
+ <configs>
+ <config>src/main/resources/rest-context.xml</config>
+ </configs>
+ <configSets>
+ </configSets>
+</beansProjectDescription>
24 CLI/README.txt
View
@@ -0,0 +1,24 @@
+
+Make sure Spring Hadoop Admin is running:
+open new terminal window, and change to "spring-hadoop-admin" root directory.
+$ ./gradlew jettyRun
+
+
+
+1. build:
+$ mvn clean package
+
+2. run
+$ java -jar target/admin-cli-1.0.0.BUILD-SNAPSHOT.jar
+
+3. set Spring Hadoop Admin service URL:
+springHadoopAdmin>target http://localhost:8081/spring-hadoop-admin
+
+4. list jobs:
+springHadoopAdmin>jobs-all
+
+5. run jobs:
+springHadoopAdmin>launch-job --jobName {jobName}
+
+
+
32 CLI/build.gradle
View
@@ -0,0 +1,32 @@
description = 'Spring Hadoop Admin CLI'

apply plugin: 'base'
apply plugin: 'java'

apply plugin: 'eclipse'
apply plugin: 'idea'

repositories {
    mavenLocal()
    mavenCentral()
    // Public Spring artefacts.
    // BUGFIX: each repository needs its own maven {} block. Assigning `url`
    // repeatedly inside a single maven {} just overwrites the property, so
    // only the last URL (conjars) was actually registered before.
    maven { url "http://repo.springsource.org/release" }
    maven { url "http://repo.springsource.org/milestone" }
    maven { url "http://repo.springsource.org/snapshot" }
    maven { url "http://oss.sonatype.org/content/repositories/snapshots" }
    maven { url "http://springframework.svn.sourceforge.net/svnroot/springframework/repos/repo-ext/" }
    maven { url "http://www.datanucleus.org/downloads/maven2/" }
    maven { url "http://conjars.org/repo" }
}

dependencies {
    compile "org.springframework.shell:spring-shell:$springShellVersion"

    testCompile "junit:junit:$junitVersion"
}

defaultTasks 'clean', 'build'
6 CLI/gradle.properties
View
@@ -0,0 +1,6 @@
+springShellVersion = 1.0.0.BUILD-SNAPSHOT
+
+junitVersion = 4.8.1
+
+version = 1.0.0.BUILD-SNAPSHOT
+
132 CLI/pom.xml
View
@@ -0,0 +1,132 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>org.springframework.data.hadoop</groupId>
+ <artifactId>admin-cli</artifactId>
+ <version>1.0.0.BUILD-SNAPSHOT</version>
+ <packaging>jar</packaging>
+
+ <name>spring-hadoop-admin-cli</name>
+ <url>http://maven.apache.org</url>
+
+ <properties>
+ <spring.shell.version>1.0.0.BUILD-SNAPSHOT</spring.shell.version>
+ <jar.mainclass>org.springframework.shell.Bootstrap</jar.mainclass>
+ </properties>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>org.springframework.shell</groupId>
+ <artifactId>spring-shell</artifactId>
+ <version>${spring.shell.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-configuration</groupId>
+ <artifactId>commons-configuration</artifactId>
+ <version>1.8</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-web</artifactId>
+ <version>3.1.1.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-core-asl</artifactId>
+ <version>1.9.6</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ <version>1.9.6</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.batch</groupId>
+ <artifactId>spring-batch-admin-manager</artifactId>
+ <version>1.2.1.RELEASE</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.batch</groupId>
+ <artifactId>spring-batch-admin-resources</artifactId>
+ <version>1.2.1.RELEASE</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.5</source>
+ <target>1.5</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/lib</outputDirectory>
+ <overWriteReleases>true</overWriteReleases>
+ <overWriteSnapshots>true</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <configuration>
+ <archive>
+ <manifest>
+ <addClasspath>true</addClasspath>
+ <useUniqueVersions>false</useUniqueVersions>
+ <classpathPrefix>lib/</classpathPrefix>
+ <mainClass>${jar.mainclass}</mainClass>
+ </manifest>
+ <manifestEntries>
+ <version>${project.version}</version>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+
+ </build>
+
+
+ <repositories>
+ <!-- jline 1.0.S2-B is here http://shrub.appspot.com/spring-roo-repository.springsource.org/release/net/sourceforge/jline/jline/1.0.S2-B/ -->
+ <repository>
+ <id>spring-roo-repository</id>
+ <name>Spring Roo Maven Repository</name>
+ <url>http://spring-roo-repository.springsource.org/release</url>
+ </repository>
+
+ <repository>
+ <id>spring-maven-snapshot</id>
+ <snapshots>
+ <enabled>true</enabled>
+ <!-- <updatePolicy>always</updatePolicy> -->
+ </snapshots>
+ <name>Springframework Maven SNAPSHOT Repository</name>
+ <url>http://maven.springframework.org/snapshot</url>
+ </repository>
+
+ <repository>
+ <id>spring-maven-milestone</id>
+ <name>Springframework Maven Milestone Repository</name>
+ <url>http://maven.springframework.org/milestone</url>
+ </repository>
+ </repositories>
+</project>
100 CLI/spring-hadoop-admin.log
View
@@ -0,0 +1,100 @@
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:02:16
+help
+// [failed] executions-all
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:04:01
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:04:27
+target --url http://localhost:8081/spring-hadoop-admin/
+info
+executions-all
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:06:20
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:06:33
+jobs-all
+jobs-name --jobName wordcount-withscript-job
+job-execute --jobName wordcount-withscript-job
+job-execute --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:15:46
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:15:54
+job-execute --jobName wordcount-withscript-job
+help
+executions-all
+jobs-all
+executions-name --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:26:41
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:26:49
+help
+executions-all
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:28:38
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:28:48
+jobs-all
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:45:33
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:45:41
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 13:59:29
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 13:59:37
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 15:05:20
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 15:06:36
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 15:07:58
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 15:12:33
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 15:12:55
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 15:13:39
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 15:13:50
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 15:15:14
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 15:44:48
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-08 15:53:26
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-08 17:02:59
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-09 11:27:30
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-09 11:40:49
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-09 11:40:59
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-09 11:42:43
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-09 11:43:44
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-09 11:47:20
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-09 11:47:28
+help
+jobs-all
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+// [failed] launch-job --jobName wordcount-withscript-job
+info
+target --url http://localhost:8080/spring-hadoop-admin
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-09 12:16:43
+// Spring Roo UNKNOWN VERSION log opened at 2012-05-09 12:16:52
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+launch-job --jobName wordcount-withscript-job
+quit
+// Spring Roo UNKNOWN VERSION log closed at 2012-05-09 13:03:31
1  CLI/spring-hadoop-admin.properties
View
@@ -0,0 +1 @@
+targetUrl = http://localhost:8080/spring-hadoop-admin
116 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/BaseCommand.java
View
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.commands;
+
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+import org.springframework.data.hadoop.admin.cli.util.Log;
+import org.springframework.data.hadoop.admin.cli.util.PropertyUtil;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class BaseCommand {
+
+ private String commandURL = "jobs.json";
+
+ /**
+ * call rest service with "Get"
+ */
+ public void callGetService() {
+ RestTemplate template = getRestTemplate();
+ String json = template.getForObject(getCommandUrl(), String.class);
+ Log.show(json);
+ }
+
+
+ /**
+ * call rest service with "Post"
+ * @param <T>
+ */
+ public <T> void callPostService(T object) {
+ RestTemplate template = getRestTemplate();
+// String message = template.postForObject(getCommandUrl(), object, String.class);
+ HttpEntity<T> entity = new HttpEntity<T>(object);
+ ResponseEntity<String> response = template.exchange(getCommandUrl(), HttpMethod.POST, entity, String.class);
+ String message = response.getBody();
+ if(message != null){
+ Log.show(message);
+ }
+ }
+
+ /**
+ * call rest service with "Delete"
+ */
+ public void callDeleteService() {
+ RestTemplate template = getRestTemplate();
+ template.delete(getCommandUrl());
+ }
+
+ /**
+ * get RestTempate from xml Beans.
+ *
+ * @return
+ */
+ private RestTemplate getRestTemplate() {
+ ApplicationContext context = new ClassPathXmlApplicationContext("rest-context.xml");
+ RestTemplate template = context.getBean("restTemplate", RestTemplate.class);
+ return template;
+ }
+
+ /**
+ * get command URL
+ *
+ * @return
+ */
+ private String getCommandUrl() {
+ try {
+ String serviceUrl = PropertyUtil.getTargetURl();
+ if(serviceUrl == null || serviceUrl.length() == 0){
+ Log.error("you must set Spring Hadoop Admin service URL first by running target command");
+ }
+ if (!serviceUrl.endsWith("/")) {
+ serviceUrl += "/";
+ }
+ serviceUrl += getCommandURL();
+ return serviceUrl;
+ } catch (Exception e) {
+ Log.error("get service url failed. " + e.getMessage());
+ }
+ return null;
+ }
+
+ /**
+ * @return the commandURL
+ */
+ public String getCommandURL() {
+ return commandURL;
+ }
+
+ /**
+ * @param commandURL the commandURL to set
+ */
+ public void setCommandURL(String commandURL) {
+ this.commandURL = commandURL;
+ }
+
+
+}
142 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/ExecutionsCommand.java
View
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.commands;
+
+import java.util.Date;
+
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CliOption;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.stereotype.Component;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+
+/**
+ * Operations for "Executions"
+ *
+ * @author Jarred Li
+ * @since 1.0
+ */
+@Component
+public class ExecutionsCommand extends BaseCommand implements CommandMarker {
+
+ /**
+ * get all executions
+ *
+ */
+ @CliCommand(value = "executions-all", help = "get all job executions, in order of most recent to least")
+ public void getExecutions() {
+ super.setCommandURL("jobs/executions.json");
+ super.callGetService();
+ }
+
+ /**
+ * get the job executions for the specified jobName
+ *
+ * @param jobName The job name
+ */
+ @CliCommand(value = "executions-by-name", help = "List the JobExecutions for the job name provided")
+ public void getJobExecutions(@CliOption(key = { "jobName" }, mandatory = true, help = "Job Name") final String jobName) {
+ String url = "jobs/";
+ url += jobName;
+ url += "/executions.json";
+ super.setCommandURL(url);
+ super.callGetService();
+ }
+
+ /**
+ * get the job executions for the specified jobInstance
+ *
+ * @param jobName The job name
+ * @param jobInstanceId The job instance Id
+ */
+ @CliCommand(value = "executions-by-instanceId", help = "List the JobExecutions for the job instance with the id provided")
+ public void getJobInstanceExecutions(@CliOption(key = { "jobName" }, mandatory = true, help = "Job Name") final String jobName,
+ @CliOption(key = { "jobInstanceId" }, mandatory = true, help = "Job Instance Id") final String jobInstanceId) {
+ String url = "jobs/";
+ url += jobName;
+ url += "/";
+ url += jobInstanceId;
+ url += "/executions.json";
+ super.setCommandURL(url);
+ super.callGetService();
+ }
+
+
+ /**
+ * get one job execution with the id provided
+ *
+ * @param jobExecutionId
+ */
+ @CliCommand(value = "execution-by-id", help = "Show the JobExecution with the id provided")
+ public void getExecution(@CliOption(key = { "jobExecutionId" }, mandatory = true, help = "Job Execution Id") final String jobExecutionId) {
+ String url = "jobs/executions/";
+ url += jobExecutionId;
+ url += ".json";
+ super.setCommandURL(url);
+ super.callGetService();
+ }
+
+
+ /**
+ * stop all executions
+ *
+ */
+ @CliCommand(value = "executions-stop-all", help = "Stop all job executions")
+ public void stopExecutions() {
+ super.setCommandURL("jobs/executions.json");
+ super.callDeleteService();
+ }
+
+ /**
+ * restart the job executions
+ *
+ * @param jobName
+ * @param jobInstanceId
+ */
+ @CliCommand(value = "execution-restart-by-instanceId", help = "restart the JobExecutions for the job instance with the id provided")
+ public void restartJobInstanceExecutions(@CliOption(key = { "jobName" }, mandatory = true, help = "Job Name") final String jobName,
+ @CliOption(key = { "jobInstanceId" }, mandatory = true, help = "Job Instance Id") final String jobInstanceId) {
+ String url = "jobs/";
+ url += jobName;
+ url += "/";
+ url += jobInstanceId;
+ url += "/executions.json";
+ super.setCommandURL(url);
+ Date now = new Date();
+ MultiValueMap<String, Date> mvm = new LinkedMultiValueMap<String, Date>();
+ mvm.add("date", now);
+ super.callPostService(mvm);
+ }
+
+ /**
+ * stop one execution
+ *
+ * @param jobExecutionId the execution to be stoped
+ *
+ */
+ @CliCommand(value = "execution-stop-by-id", help = "stop the JobExecution with the id provided")
+ public void stopExecution(@CliOption(key = { "jobExecutionId" }, mandatory = true, help = "Job Execution Id") final String jobExecutionId) {
+ String url = "jobs/executions/";
+ url += jobExecutionId;
+ url += ".json";
+ super.setCommandURL(url);
+ super.callDeleteService();
+ }
+
+
+
+}
42 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/InfoCommand.java
View
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.commands;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.springframework.data.hadoop.admin.cli.util.Log;
+import org.springframework.data.hadoop.admin.cli.util.PropertyUtil;
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author Jarred Li
+ *
+ */
+@Component
+public class InfoCommand implements CommandMarker {
+
+ @CliCommand(value = "info", help = "list Spring Hadoop Admin CLI information")
+ public void getCLIInfo() {
+ try {
+ String serviceUrl = PropertyUtil.getTargetURl();
+ Log.show(" service url: " + serviceUrl);
+ } catch (ConfigurationException e) {
+ Log.error("set target url failed. " + e.getMessage());
+ }
+ }
+
+}
73 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/JobsCommand.java
View
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.commands;
+
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CliOption;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.stereotype.Component;
+import org.springframework.util.LinkedMultiValueMap;
+import org.springframework.util.MultiValueMap;
+
+/**
+ * @author Jarred Li
+ *
+ */
+@Component
+public class JobsCommand extends BaseCommand implements CommandMarker {
+
+ private static int count = 0;
+
+ /**
+ * list all jobs
+ */
+ @CliCommand(value = "jobs-all", help = "list all jobs information")
+ public void getJobs() {
+ super.setCommandURL("jobs.json");
+ super.callGetService();
+ }
+
+ /**
+ * list job by name
+ *
+ * @param jobName
+ */
+ @CliCommand(value = "jobs-by-name", help = "list jobs information by name")
+ public void getJobsByName(@CliOption(key = { "jobName" }, mandatory = true, help = "Job Name") final String jobName) {
+ String url = "jobs/";
+ url += jobName;
+ url += ".json";
+ super.setCommandURL(url);
+ super.callGetService();
+ }
+
+ /**
+ * launch job
+ *
+ * @param jobName
+ */
+ @CliCommand(value = "launch-job", help = "execute job")
+ public void executeJob(@CliOption(key = { "jobName" }, mandatory = true, help = "Job Name") final String jobName) {
+ String url = "jobs/";
+ url += jobName;
+ url += ".json";
+ super.setCommandURL(url);
+ MultiValueMap<String, String> mvm = new LinkedMultiValueMap<String, String>();
+ mvm.add("jobParameters", "fail=false, id=" + count++);
+ super.callPostService(mvm);
+ }
+
+}
41 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/commands/TargetCommand.java
View
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.commands;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.springframework.data.hadoop.admin.cli.util.Log;
+import org.springframework.data.hadoop.admin.cli.util.PropertyUtil;
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CliOption;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author Jarred Li
+ *
+ */
+@Component
+public class TargetCommand implements CommandMarker {
+
+ @CliCommand(value = "target", help = "connect to Spring Hadoop Admin server")
+ public void target(@CliOption(key = { "url" }, mandatory = true, help = "Spring Hadoop Admin service URL") final String url) {
+ try {
+ PropertyUtil.setTargetUrl(url);
+ } catch (ConfigurationException e) {
+ Log.error("set target url failed. " + e.getMessage());
+ }
+ }
+}
73 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/plugin/SpringHadoopAdminBannerProvider.java
View
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.plugin;
+
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.roo.support.util.StringUtils;
+import org.springframework.shell.plugin.support.DefaultBannerProvider;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author Jarred Li
+ *
+ */
+@Component
+@Order(Ordered.HIGHEST_PRECEDENCE)
+public class SpringHadoopAdminBannerProvider extends DefaultBannerProvider
+ implements CommandMarker {
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.BannerProvider#getBanner()
+ */
+ @CliCommand(value = { "version" }, help = "Displays current CLI version")
+ public String getBanner() {
+ StringBuffer buf = new StringBuffer();
+ buf.append("=======================================" + StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("* Spring Hadoop Admin *" +StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("=======================================" + StringUtils.LINE_SEPARATOR);
+ buf.append("Verson:" + this.getVersion());
+ return buf.toString();
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.BannerProvider#getVersion()
+ */
+ public String getVersion() {
+ return "1.0.0";
+ }
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.BannerProvider#getWelcomMessage()
+ */
+ public String getWelcomMessage() {
+ return "Welcome to Spring Hadoop Admin CLI";
+ }
+
+ @Override
+ public String name() {
+ return "spring hadoop admin banner provider";
+ }
+
+
+}
42 ...main/java/org/springframework/data/hadoop/admin/cli/plugin/SpringHadoopAdminHistoryFileNameProvider.java
View
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.data.hadoop.admin.cli.plugin;
+
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider;
+import org.springframework.stereotype.Component;
+
+/**
+ *
+ * @author Jarred Li
+ *
+ */
+@Component
+@Order(Ordered.HIGHEST_PRECEDENCE)
+public class SpringHadoopAdminHistoryFileNameProvider extends DefaultHistoryFileNameProvider{
+
+ public String getHistoryFileName() {
+ return "spring-hadoop-admin.log";
+ }
+
+ @Override
+ public String name() {
+ return "spring hadoop admin history file name provider";
+ }
+
+}
43 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/plugin/SpringHadoopAdminPromptProvider.java
View
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.plugin;
+
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.shell.plugin.support.DefaultPromptProvider;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author Jarred Li
+ *
+ */
+@Component
+@Order(Ordered.HIGHEST_PRECEDENCE)
+public class SpringHadoopAdminPromptProvider extends DefaultPromptProvider {
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.PromptProvider#getPromptText()
+ */
+ public String getPromptText() {
+ return "springHadoopAdmin>";
+ }
+
+ @Override
+ public String name() {
+ return "spring hadoop admin prompt provider";
+ }
+
+}
35 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/util/Log.java
View
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.util;
+
+import org.springframework.roo.support.util.Assert;
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class Log {
+
+ public static void error(String message) {
+ Assert.notNull(message, "the message is null");
+ System.err.println(message);
+ }
+
+ public static void show(String message) {
+ Assert.notNull(message, "the message is null");
+ System.out.println(message);
+ }
+}
59 CLI/src/main/java/org/springframework/data/hadoop/admin/cli/util/PropertyUtil.java
View
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.cli.util;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class PropertyUtil {
+
+ private static final Log logger = LogFactory.getLog(PropertyUtil.class);
+
+ public static String adminPropertyFileName = "spring-hadoop-admin.properties";
+
+ static{
+ File f = new File(adminPropertyFileName);
+ if(!f.exists()){
+ try {
+ f.createNewFile();
+ } catch (IOException e) {
+ logger.error("create property file failed", e);
+ }
+ }
+ }
+
+ public static void setTargetUrl(String targetUrl) throws ConfigurationException {
+ PropertiesConfiguration config = new PropertiesConfiguration(adminPropertyFileName);
+ config.setProperty("targetUrl", targetUrl);
+ config.save();
+ }
+
+ public static String getTargetURl() throws ConfigurationException {
+ String result = null;
+ PropertiesConfiguration config = new PropertiesConfiguration(adminPropertyFileName);
+ result = config.getString("targetUrl");
+ return result;
+ }
+}
12 CLI/src/main/resources/META-INF/spring/spring-shell-plugin.xml
View
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd">
+
+ <context:component-scan base-package="org.springframework.data.hadoop.admin.cli">
+ <context:include-filter type="regex" expression="(commands|plugin)\..*"/>
+ </context:component-scan>
+
+</beans>
23 CLI/src/main/resources/rest-context.xml
View
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
+
+ <bean id="restTemplate" class="org.springframework.web.client.RestTemplate">
+ <!--
+ <property name="messageConverters">
+ <list>
+ <ref bean="stringHttpMessageConverter"/>
+ <ref bean="jsonConverter" />
+ </list>
+ </property>
+ -->
+ </bean>
+ <bean id="jsonConverter"
+ class="org.springframework.http.converter.json.MappingJacksonHttpMessageConverter">
+ <property name="supportedMediaTypes" value="application/json" />
+ </bean>
+ <bean id="stringHttpMessageConverter"
+ class="org.springframework.http.converter.StringHttpMessageConverter" />
+
+</beans>
28 README.md
View
@@ -1,4 +1,30 @@
spring-hadoop-admin
===================
-Web application and API for managing and monitoring Spring Hadoop
+admin application for spring hadoop.
+1. checkout the code
+$ git clone git@github.com:leejianwei/spring-hadoop-admin.git
+
+2. build & run
+
+$./gradlew jettyRun
+
+3. Upload spring hadoop example
+
+3a.In the browser, open "http://localhost:8081/spring-hadoop-admin"
+
+3b.Click "File" menu
+
+3c.In the "Server Path" textfield, input "hadoopjob/job1"
+3d.Click "Browse..." button, and navigate to "spring-hadoop-admin/samples/wordcount-batch" folder,
+3e.Select "hadoop-examples-1.0.0.jar",
+3f.Click "Upload" button,
+
+repeat 3c to 3f and upload "data.jar", "hadoop.properties" and "wordcount-context.xml" respectively.
+
+
+4. check uploaded jobs
+
+4a. Click "Jobs" menu, you should see the uploaded jobs. Run the job following the Spring Batch Admin guide.
+
+
60 build.gradle
View
@@ -0,0 +1,60 @@
+description = 'Spring Hadoop Admin'
+
+apply plugin: 'base'
+apply plugin: 'java'
+apply plugin: 'war'
+apply plugin: 'jetty'
+
+apply plugin: 'eclipse-wtp'
+apply plugin: 'idea'
+
+repositories {
+ mavenLocal()
+ mavenCentral()
+ // Public Spring artefacts
+ maven{
+ url "http://repo.springsource.org/release"
+ url "http://repo.springsource.org/milestone"
+ url "http://repo.springsource.org/snapshot"
+ url "http://oss.sonatype.org/content/repositories/snapshots"
+ url "http://springframework.svn.sourceforge.net/svnroot/springframework/repos/repo-ext/"
+ url "http://www.datanucleus.org/downloads/maven2/"
+ url "http://conjars.org/repo"
+ }
+}
+
+dependencies {
+ compile "org.springframework.data:spring-data-hadoop:$springHadoopVersion"
+ compile "org.apache.hadoop:hadoop-core:$hadoopVersion"
+
+ compile "org.springframework.batch:spring-batch-admin-manager:$springBatchAdminVersion"
+ compile "org.springframework.batch:spring-batch-admin-resources:$springBatchAdminVersion"
+
+ compile "commons-configuration:commons-configuration:$commonsConfigVersion"
+ runtime "commons-lang:commons-lang:$commonsLangVersion"
+ runtime "org.codehaus.groovy:groovy:$groovyVersion"
+ runtime "hsqldb:hsqldb:$hsqldbVersion"
+ runtime "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion"
+
+ //pig
+ runtime "org.apache.pig:pig:$pigVersion"
+ runtime "jline:jline:$jlineVersion"
+ runtime "org.antlr:antlr:$antlrVersion"
+
+ //hive
+ runtime "javax.jdo:jdo2-api:$jdoVersion"
+ runtime "org.apache.hive:hive-service:$hiveVersion"
+ runtime "org.apache.hive:hive-metastore:$hiveVersion"
+ runtime "org.apache.hive:hive-jdbc:$hiveVersion"
+
+ //log
+ runtime "org.slf4j:slf4j-api:$slf4jVersion"
+ runtime "org.slf4j:slf4j-log4j12:$slf4jVersion"
+ runtime "org.slf4j:jcl-over-slf4j:$slf4jVersion"
+
+ testCompile "junit:junit:$junitVersion"
+}
+
+httpPort = 8081
+
+defaultTasks 'clean', 'build'
66 context.xml
View
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
+ xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:p="http://www.springframework.org/schema/p"
+ xmlns:task="http://www.springframework.org/schema/task"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
+ http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
+
+
+ <context:property-placeholder location="hadoop.properties" />
+
+ <hdp:configuration>
+ fs.default.name=${hd.fs}
+ mapred.job.tracker=${mapred.job.tracker}
+ </hdp:configuration>
+
+ <bean name="pathUtils" class="org.springframework.data.hadoop.util.PathUtils"
+ p:rootPath="${wordcount.output.path}" p:pathFormat="year/month/day/hour/minute/second"
+ p:appendUUID="true" />
+
+ <bean id="wc-job"
+ class="org.springframework.data.hadoop.mapreduce.JobFactoryBean"
+ p:configuration-ref="hadoop-configuration" p:input-paths="${wordcount.input.path}"
+ p:output-path="#{@pathUtils.getTimeBasedPathFromRoot()}" p:mapper="org.apache.hadoop.examples.WordCount.TokenizerMapper"
+ p:reducer="org.apache.hadoop.examples.WordCount.IntSumReducer"
+ p:jar="${wordcount.jar.path}"
+ p:validate-paths="false" scope="prototype" />
+
+
+ <hdp:script id="clean-script" language="javascript">
+ // 'hack' default permissions to make Hadoop work on Windows
+ if (java.lang.System.getProperty("os.name").startsWith("Windows")) {
+ // 0655 = -rw-r-xr-x
+ org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort(0655)
+ org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort(0655)
+ }
+
+ inputPath = "${wordcount.input.path}"
+ outputPath = "${wordcount.output.path}"
+ if (fsh.test(inputPath)) { fsh.rmr(inputPath) }
+ if (fsh.test(outputPath)) { fsh.rmr(outputPath) }
+
+ // copy using the streams directly (to be portable across envs)
+ inStream = cl.getResourceAsStream("data/nietzsche-chapter-1.txt")
+ org.apache.hadoop.io.IOUtils.copyBytes(inStream, fs.create(inputPath), cfg)
+ </hdp:script>
+
+ <!-- simple job runner -->
+ <bean id="runner" class="org.springframework.data.hadoop.mapreduce.JobRunner"
+ depends-on="clean-script" p:runAtStartup="false">
+ <property name="jobNames">
+ <set>
+ <value>wc-job</value>
+ </set>
+ </property>
+ </bean>
+
+ <task:scheduler id="myScheduler" pool-size="10" />
+ <task:scheduled-tasks scheduler="myScheduler">
+ <task:scheduled ref="runner" method="runJobs" cron="10 * * * * *" />
+ </task:scheduled-tasks>
+
+
+</beans>
20 gradle.properties
View
@@ -0,0 +1,20 @@
+junitVersion = 4.8.1
+hadoopVersion = 1.0.0
+pigVersion = 0.9.2
+jlineVersion = 1.0
+antlrVersion = 3.1.1
+hiveVersion = 0.8.1
+jdoVersion = 2.3-ec
+groovyVersion = 1.8.5
+jacksonVersion = 1.8.8
+hsqldbVersion=1.8.0.7
+commonsConfigVersion = 1.6
+commonsLangVersion = 2.4
+springHadoopVersion = 1.0.0.BUILD-SNAPSHOT
+slf4jVersion = 1.6.1
+
+#springBatchAdminVersion = 1.2.1.RELEASE
+springBatchAdminVersion = 1.2.2.BUILD-SNAPSHOT
+
+version = 1.0.0.BUILD-SNAPSHOT
+
6 gradle/wrapper/gradle-wrapper.properties
View
@@ -0,0 +1,6 @@
+#Wed Apr 25 17:59:38 CST 2012
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=http\://services.gradle.org/distributions/gradle-1.0-rc-2-bin.zip
164 gradlew
View
@@ -0,0 +1,164 @@
+#!/bin/bash
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+ echo "$*"
+}
+
+die ( ) {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched.
+if $cygwin ; then
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/"
+APP_HOME="`pwd -P`"
+cd "$SAVED"
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ JAVA_OPTS="$JAVA_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+ JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
90 gradlew.bat
View
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
168 pom.xml
View
@@ -0,0 +1,168 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>spring-hadoop-admin</groupId>
+ <artifactId>spring-hadoop-admin</artifactId>
+ <version>1.0.0.BUILD-SNAPSHOT</version>
+ <packaging>war</packaging>
+
+
+ <properties>
+ <springHadoopVersion>1.0.0.BUILD-SNAPSHOT</springHadoopVersion>
+ <hadoopVersion>1.0.0</hadoopVersion>
+ <springBatchAdminVersion>1.2.1.RELEASE</springBatchAdminVersion>
+ <commonsConfigVersion>1.6</commonsConfigVersion>
+ <commonsLangVersion>2.4</commonsLangVersion>
+ <hsqldbVersion>1.8.0.7</hsqldbVersion>
+ <junitVersion>4.8.1</junitVersion>
+ <jacksonVersion>1.8.1</jacksonVersion>
+ <groovyVersion>1.8.5</groovyVersion>
+ <pigVersion>0.9.2</pigVersion>
+ <jlineVersion>1.0</jlineVersion>
+ <antlrVersion>3.1.1</antlrVersion>
+ <hiveVersion>0.8.1</hiveVersion>
+ <jdoVersion>2.3-ec</jdoVersion>
+ <slf4jVersion>1.6.1</slf4jVersion>
+ </properties>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>maven-jetty-plugin</artifactId>
+ <configuration>
+ <contextPath>/spring-hadoop-admin</contextPath>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>${slf4jVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <version>${slf4jVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jcl-over-slf4j</artifactId>
+ <version>${slf4jVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.data</groupId>
+ <artifactId>spring-data-hadoop</artifactId>
+ <version>${springHadoopVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>${hadoopVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.batch</groupId>
+ <artifactId>spring-batch-admin-manager</artifactId>
+ <version>${springBatchAdminVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.batch</groupId>
+ <artifactId>spring-batch-admin-resources</artifactId>
+ <version>${springBatchAdminVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-configuration</groupId>
+ <artifactId>commons-configuration</artifactId>
+ <version>${commonsConfigVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>${commonsLangVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>${hsqldbVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ <version>${jacksonVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.groovy</groupId>
+ <artifactId>groovy</artifactId>
+ <version>${groovyVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junitVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.pig</groupId>
+ <artifactId>pig</artifactId>
+ <version>${pigVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>${jlineVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr</artifactId>
+ <version>${antlrVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.jdo</groupId>
+ <artifactId>jdo2-api</artifactId>
+ <version>${jdoVersion}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-service</artifactId>
+ <version>${hiveVersion}</version>
+ <exclusions>
+   <exclusion>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-core</artifactId>
+   </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-metastore</artifactId>
+ <version>${hiveVersion}</version>
+ <exclusions>
+   <exclusion>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-core</artifactId>
+   </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-jdbc</artifactId>
+ <version>${hiveVersion}</version>
+ </dependency>
+ </dependencies>
+ <repositories>
+ <repository>
+ <id>datanucleus</id>
+ <url>http://www.datanucleus.org/downloads/maven2/</url>
+ </repository>
+ </repositories>
+</project>
8 samples/hive/hadoop.properties
View
@@ -0,0 +1,8 @@
+jar.path=
+
+wordcount.input.path=/user/hadoop/conf
+wordcount.output.path=/user/hadoop/output
+wordcount.jar.path=${jar.path}/hadoop-examples-1.0.0.jar
+
+hd.fs=hdfs://localhost:9000
+mapred.job.tracker=localhost:9001
69 samples/hive/hive-context.xml
View
@@ -0,0 +1,69 @@
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:batch="http://www.springframework.org/schema/batch"
+ xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:context="http://www.springframework.org/schema/context"
+ xmlns:c="http://www.springframework.org/schema/c" xmlns:p="http://www.springframework.org/schema/p"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.1.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
+
+ <context:property-placeholder location="hadoop.properties" />
+
+ <hdp:configuration>
+ fs.default.name=${hd.fs}
+ mapred.job.tracker=${mapred.job.tracker}
+ </hdp:configuration>
+
+
+ <hdp:hive-client host="localhost" port="10000">
+ </hdp:hive-client>
+
+
+ <job id="hiveJob" xmlns="http://www.springframework.org/schema/batch">
+ <step id="prepareData" next="initTable">
+ <tasklet ref="prepareDataTasklet" />
+ </step>
+ <step id="initTable" next="loadData">
+ <tasklet ref="initTableTasklet" />
+ </step>
+ <step id="loadData" next="showData">
+ <tasklet ref="loadDataScriptTasklet" />
+ </step>
+ <step id="showData">
+ <tasklet ref="showDataTasklet" />
+ </step>
+ </job>
+
+ <hdp:script-tasklet id="prepareDataTasklet">
+ <hdp:script language="groovy">
+ <![CDATA[
+ if (fsh.test("u.data")) {
+ fsh.rmr("u.data")
+ }
+ inStream = cl.getResourceAsStream("ml-100k/u.data")
+ org.apache.hadoop.io.IOUtils.copyBytes(inStream, fs.create("u.data"), cfg)
+ ]]>
+ </hdp:script>
+ </hdp:script-tasklet>
+
+ <hdp:hive-tasklet id="initTableTasklet"
+ hive-client-ref="hive-client">
+ <hdp:script location="classpath:script.sql" />
+ </hdp:hive-tasklet>
+
+ <hdp:hive-tasklet id="loadDataScriptTasklet"
+ hive-client-ref="hive-client">
+ <hdp:script>
+ LOAD DATA INPATH 'u.data' OVERWRITE INTO TABLE u_data;
+ </hdp:script>
+ </hdp:hive-tasklet>
+
+ <hdp:hive-tasklet id="showDataTasklet"
+ hive-client-ref="hive-client">
+ <hdp:script>
+ select movieid, avg(rating) from u_data group by movieid;
+ </hdp:script>
+ </hdp:hive-tasklet>
+
+
+</beans>
3  samples/hive/script.sql
View
@@ -0,0 +1,3 @@
+DROP TABLE u_data;
+CREATE EXTERNAL TABLE u_data(userid INT,movieid INT,rating INT,unixtime STRING)ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
+
8 samples/pig/hadoop.properties
View
@@ -0,0 +1,8 @@
+jar.path=
+
+wordcount.input.path=/user/hadoop/conf
+wordcount.output.path=/user/hadoop/output
+wordcount.jar.path=${jar.path}/hadoop-examples-1.0.0.jar
+
+hd.fs=hdfs://localhost:9000
+mapred.job.tracker=localhost:9001
45 samples/pig/pig-context.xml
View
@@ -0,0 +1,45 @@
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:batch="http://www.springframework.org/schema/batch"
+ xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:context="http://www.springframework.org/schema/context"
+ xmlns:c="http://www.springframework.org/schema/c" xmlns:p="http://www.springframework.org/schema/p"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.1.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
+
+ <context:property-placeholder location="hadoop.properties" />
+
+ <hdp:configuration>
+ fs.default.name=${hd.fs}
+ mapred.job.tracker=${mapred.job.tracker}
+ </hdp:configuration>
+
+ <hdp:pig configuration-ref="hadoop-configuration" exec-type="MAPREDUCE"/>
+
+ <job id="pigJob" xmlns="http://www.springframework.org/schema/batch">
+ <step id="prepareData" next="loadData">
+ <tasklet ref="prepareDataTasklet" />
+ </step>
+ <step id="loadData">
+ <tasklet ref="loadDataTasklet" />
+ </step>
+ </job>
+
+ <hdp:script-tasklet id="prepareDataTasklet">
+ <hdp:script language="groovy">
+ <![CDATA[
+ if (fsh.test("u.data")) {
+ fsh.rmr("u.data")
+ }
+ inStream = cl.getResourceAsStream("ml-100k/u.data")
+ org.apache.hadoop.io.IOUtils.copyBytes(inStream, fs.create("u.data"), cfg)
+ ]]>
+ </hdp:script>
+ </hdp:script-tasklet>
+
+ <hdp:pig-tasklet id="loadDataTasklet">
+ <hdp:script location="classpath:script.pig" />
+ </hdp:pig-tasklet>
+
+
+</beans>
4 samples/pig/script.pig
View
@@ -0,0 +1,4 @@
+a = load 'u.data' as (userid:int, movieid:int, rating:int, unixtime:chararray);
+good_rating = filter a by rating > 3;
+user_rating = group good_rating by movieid;
+dump user_rating;
14 samples/wordcount-batch-notification/.springBeans
View
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beansProjectDescription>
+ <version>1</version>
+ <pluginVersion><![CDATA[2.9.1.201203220057-RELEASE]]></pluginVersion>
+ <configSuffixes>
+ <configSuffix><![CDATA[xml]]></configSuffix>
+ </configSuffixes>
+ <enableImports><![CDATA[false]]></enableImports>
+ <configs>
+ <config>src/test/resources/mail-context.xml</config>
+ </configs>
+ <configSets>
+ </configSets>
+</beansProjectDescription>
37 samples/wordcount-batch-notification/build.gradle
View
@@ -0,0 +1,37 @@
+description = 'Spring Hadoop Admin Examples - Email Notification'
+
+// Standard Gradle Java project with Eclipse/IDEA metadata generation.
+apply plugin: 'base'
+apply plugin: 'java'
+
+apply plugin: 'eclipse'
+apply plugin: 'idea'
+
+repositories {
+ mavenLocal()
+ mavenCentral()
+ // Public Spring artifact repositories.
+ // NOTE(review): in Gradle, each `url` call inside a single maven {} block
+ // overwrites the previous one, so only the LAST url below takes effect.
+ // If all of these repositories are needed, declare one maven {} block per URL.
+ maven{
+ url "http://repo.springsource.org/release"
+ url "http://repo.springsource.org/milestone"
+ url "http://repo.springsource.org/snapshot"
+ url "http://oss.sonatype.org/content/repositories/snapshots"
+ url "http://springframework.svn.sourceforge.net/svnroot/springframework/repos/repo-ext/"
+ url "http://www.datanucleus.org/downloads/maven2/"
+ url "http://conjars.org/repo"
+ }
+}
+
+dependencies {
+ // Spring mail support (JavaMailSender) and Spring Batch job/listener API.
+ compile "org.springframework:spring-context-support:$springVersion"
+ compile "org.springframework.batch:spring-batch-core:$springBatchVersion"
+
+ // Runtime-only: JavaMail implementation, activation framework, in-memory DB for the batch repository.
+ runtime "javax.mail:mail:1.4.5"
+ runtime "jaf:activation:1.0.2"
+ runtime "hsqldb:hsqldb:$hsqldbVersion"
+
+ testCompile "junit:junit:$junitVersion"
+ testCompile "org.springframework:spring-test:$springVersion"
+}
+
+
+defaultTasks 'clean', 'build'
7 samples/wordcount-batch-notification/gradle.properties
View
@@ -0,0 +1,7 @@
+springVersion = 3.0.7.RELEASE
+springBatchVersion = 2.1.7.RELEASE
+hsqldbVersion=1.8.0.7
+junitVersion = 4.8.1
+
+version = 1.0.0.BUILD-SNAPSHOT
+
8 samples/wordcount-batch-notification/hadoop.properties
View
@@ -0,0 +1,8 @@
+jar.path=
+
+wordcount.input.path=/user/hadoop/conf
+wordcount.output.path=/user/hadoop/output
+wordcount.jar.path=${jar.path}/hadoop-examples-1.0.0.jar
+
+hd.fs=hdfs://localhost:9000
+mapred.job.tracker=localhost:9001
91 ...t-batch-notification/src/main/java/org/springframework/data/hadoop/admin/examples/EmailNotification.java
View
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.examples;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.mail.SimpleMailMessage;
+import org.springframework.mail.javamail.JavaMailSender;
+import org.springframework.util.Assert;
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class EmailNotification implements JobExecutionListener, InitializingBean {
+
+ // NOTE(review): conventionally `private static final Log` — one logger instance per class.
+ private Log logger = LogFactory.getLog(EmailNotification.class);
+
+ // Mail transport used to deliver the notification; injected via setter, required (see afterPropertiesSet).
+ private JavaMailSender mailSender;
+
+ // Pre-configured message (from/to addresses) that each notification is copied from.
+ private SimpleMailMessage templateMessage;
+
+
+ /**
+ * @param templateMessage the templateMessage to set
+ */
+ public void setTemplateMessage(SimpleMailMessage templateMessage) {
+ this.templateMessage = templateMessage;
+ }
+
+ /**
+ * @param mailSender the mailSender to set
+ */
+ public void setMailSender(JavaMailSender mailSender) {
+ this.mailSender = mailSender;
+ }
+
+ /* (non-Javadoc)
+ * @see org.springframework.batch.core.JobExecutionListener#beforeJob(org.springframework.batch.core.JobExecution)
+ */
+ @Override
+ public void beforeJob(JobExecution jobExecution) {
+ // Intentionally a no-op: notification is only sent after the job completes.
+ }
+
+ /* (non-Javadoc)
+ * Sends an email summarizing the finished job's name and final status.
+ * @see org.springframework.batch.core.JobExecutionListener#afterJob(org.springframework.batch.core.JobExecution)
+ */
+ @Override
+ public void afterJob(JobExecution jobExecution) {
+ logger.info("afterJob enter");
+ // Copy the template so the shared from/to configuration is reused per notification.
+ SimpleMailMessage message = new SimpleMailMessage(templateMessage);
+ message.setSubject("Spring Batch Job Status");
+ message.setText("Job " + jobExecution.getJobInstance().getJobName() + " completed. Status is: "
+ + jobExecution.getStatus());
+ try {
+ // Catch Throwable so a mail failure never fails the batch job itself (best-effort notification).
+ mailSender.send(message);
+ } catch (Throwable t) {
+ logger.error("send mail failed", t);
+ }
+ // NOTE(review): this logs "sent mail" even when send() threw above; move inside the try
+ // block if the log line is meant to confirm successful delivery.
+ logger.info("sent mail");
+ }
+
+ /* (non-Javadoc)
+ * Fails fast at context startup if either collaborator was not injected.
+ * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
+ */
+ @Override
+ public void afterPropertiesSet() throws Exception {
+ Assert.notNull(mailSender, "mail sender must be set");
+ Assert.notNull(this.templateMessage, "template message must be set");
+
+
+ }
+
+}
49 ...tch-notification/src/test/java/org/springframework/data/hadoop/admin/examples/EmailNotificationTest.java
View
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.admin.examples;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobInstance;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+/**
+ * @author Jarred Li
+ *
+ */
+@ContextConfiguration("/mail-context.xml")
+@RunWith(SpringJUnit4ClassRunner.class)
+public class EmailNotificationTest {
+
+ @Autowired
+ private ApplicationContext context;
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.admin.examples.EmailNotification#afterJob(org.springframework.batch.core.JobExecution)}.
+ * NOTE(review): this is an integration test — it attempts to send a real email via the
+ * SMTP host configured in mail-context.xml and only verifies that no exception escapes.
+ */
+ @Test
+ public void testAfterJob() {
+ EmailNotification notification = context.getBean("emailNotification", EmailNotification.class);
+ // NOTE(review): prefer `1L` over lowercase `1l` — the lowercase letter is easily misread as the digit 1.
+ JobExecution jobExecution = new JobExecution(1l);
+ // JobInstance with a dummy name "aaa" so afterJob can build the message text.
+ jobExecution.setJobInstance(new JobInstance(null, null, "aaa"));
+ notification.afterJob(jobExecution);
+ }
+
+}
30 samples/wordcount-batch-notification/src/test/resources/mail-context.xml
View
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd">
+
+ <bean id="emailNotification"
+ class="org.springframework.data.hadoop.admin.examples.EmailNotification">
+ <property name="mailSender" ref="mailSender" />
+ <property name="templateMessage" ref="templateMessage" />
+ </bean>
+
+ <bean id="mailSender" class="org.springframework.mail.javamail.JavaMailSenderImpl">
+ <property name="host" value="smtp.126.com"></property>
+ <property name="username" value="leejianwei"></property>
+ <!-- NOTE: placeholder credential — never commit a real password to source control;
+      externalize it via a property placeholder instead. -->
+ <property name="password" value="******"></property>
+ <property name="javaMailProperties">
+ <props>
+ <prop key="mail.smtp.auth">true</prop>
+ <prop key="mail.smtp.connectiontimeout">5000</prop>
+ <prop key="mail.smtp.timeout">5000</prop>
+ </props>
+ </property>
+ </bean>
+
+ <bean id="templateMessage" class="org.springframework.mail.SimpleMailMessage">
+ <property name="from" value="leejianwei@126.com"></property>
+ <property name="to" value="jiali@vmware.com"></property>
+ </bean>
+
+</beans>
93 samples/wordcount-batch-notification/wordcount-notification-context.xml
View
@@ -0,0 +1,93 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
+ xmlns:beans="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:batch="http://www.springframework.org/schema/batch"
+ xmlns:p="http://www.springframework.org/schema/p"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.1.xsd
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
+
+ <context:property-placeholder location="hadoop.properties"
+ ignore-resource-not-found="true" ignore-unresolvable="true" />
+
+
+ <configuration>
+ fs.default.name=${hd.fs}
+ mapred.job.tracker=${mapred.job.tracker}
+ </configuration>
+
+ <batch:job id="wordcount-notification-job">
+ <batch:step id="import" next="wordcount">
+ <batch:tasklet ref="script-tasklet"/>
+ </batch:step>
+ <batch:step id="wordcount">
+ <batch:tasklet ref="wordcount-tasklet" />
+ </batch:step>
+ <batch:listeners>
+ <batch:listener>
+ <beans:ref bean="emailNotification"/>
+ </batch:listener>
+ </batch:listeners>
+ </batch:job>
+
+ <tasklet id="wordcount-tasklet" job-ref="wc-job"/>
+
+ <beans:bean id="emailNotification" class="org.springframework.data.hadoop.admin.examples.EmailNotification">
+ <beans:property name="mailSender" ref="mailSender"/>
+ <beans:property name="templateMessage" ref="templateMessage"/>
+ </beans:bean>
+
+ <beans:bean id="mailSender" class="org.springframework.mail.javamail.JavaMailSenderImpl">
+ <beans:property name="host" value="zimbra.vmware.com"></beans:property>
+ </beans:bean>
+
+ <beans:bean id="templateMessage" class="org.springframework.mail.SimpleMailMessage">
+ <beans:property name="from" value="jiali@vmware.com"></beans:property>
+ <beans:property name="to" value="leejianwei@126.com"></beans:property>
+ </beans:bean>
+
+ <beans:bean name="pathUtils" class="org.springframework.data.hadoop.util.PathUtils"
+ p:rootPath="/user/hadoop/output"
+ p:pathFormat="year/month/day/hour/minute/second"/>
+
+ <beans:bean id="wc-job" class="org.springframework.data.hadoop.mapreduce.JobFactoryBean"
+ p:configuration-ref="hadoop-configuration"
+ p:input-paths="/user/hadoop/input"
+ p:output-path="#{@pathUtils.getTimeBasedPathFromRoot()}"
+ p:mapper="org.apache.hadoop.examples.WordCount.TokenizerMapper"
+ p:reducer="org.apache.hadoop.examples.WordCount.IntSumReducer"
+ p:validate-paths="false"
+ p:jar="${wordcount.jar.path}"
+ scope="prototype"/>
+
+
+ <script-tasklet id="script-tasklet">
+ <script language="groovy">
+ // 'hack' default permissions to make Hadoop work on Windows
+ if (System.getProperty("os.name").startsWith("Windows")) {
+ // 0655 = -rwxr-xr-x
+ org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort((short) 0655);
+ org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort((short) 0655);
+ }
+
+
+ inputPath = "/user/hadoop/input"
+ outputPath = "/user/hadoop/output"
+ if (fsh.test(inputPath)) {
+ fsh.rmr(inputPath)
+ }
+ if (fsh.test(outputPath)) {
+ fsh.rmr(outputPath)
+ }
+
+ // copy using the streams directly (to be portable across envs)
+ inStream = cl.getResourceAsStream("data/nietzsche-chapter-1.txt")
+ org.apache.hadoop.io.IOUtils.copyBytes(inStream, fs.create(inputPath), cfg)
+ </script>
+ </script-tasklet>
+
+
+</beans:beans>
8 samples/wordcount-batch/hadoop.properties
View
@@ -0,0 +1,8 @@
+jar.path=
+
+wordcount.input.path=/user/hadoop/conf
+wordcount.output.path=/user/hadoop/output
+wordcount.jar.path=${jar.path}/hadoop-examples-1.0.0.jar
+
+hd.fs=hdfs://localhost:9000
+mapred.job.tracker=localhost:9001
74 samples/wordcount-batch/wordcount-context.xml
View
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
+ xmlns:beans="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:batch="http://www.springframework.org/schema/batch"
+ xmlns:p="http://www.springframework.org/schema/p"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.1.xsd
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
+
+ <context:property-placeholder location="hadoop.properties"
+ ignore-resource-not-found="true" ignore-unresolvable="true" />
+
+
+ <configuration>
+ fs.default.name=${hd.fs}
+ mapred.job.tracker=${mapred.job.tracker}
+ </configuration>
+
+ <batch:job id="wordcount-withscript-job">
+ <batch:step id="import" next="wordcount">
+ <batch:tasklet ref="script-tasklet"/>
+ </batch:step>
+ <batch:step id="wordcount">
+ <batch:tasklet ref="wordcount-tasklet" />
+ </batch:step>
+ </batch:job>
+
+ <tasklet id="wordcount-tasklet" job-ref="wc-job"/>
+
+ <beans:bean name="pathUtils" class="org.springframework.data.hadoop.util.PathUtils"
+ p:rootPath="/user/hadoop/output"
+ p:pathFormat="year/month/day/hour/minute/second"/>
+
+ <beans:bean id="wc-job" class="org.springframework.data.hadoop.mapreduce.JobFactoryBean"
+ p:configuration-ref="hadoop-configuration"
+ p:input-paths="/user/hadoop/input"
+ p:output-path="#{@pathUtils.getTimeBasedPathFromRoot()}"
+ p:mapper="org.apache.hadoop.examples.WordCount.TokenizerMapper"
+ p:reducer="org.apache.hadoop.examples.WordCount.IntSumReducer"
+ p:validate-paths="false"
+ p:jar="${wordcount.jar.path}"
+ scope="prototype"/>
+
+
+ <script-tasklet id="script-tasklet">
+ <script language="groovy">
+ // 'hack' default permissions to make Hadoop work on Windows
+ if (System.getProperty("os.name").startsWith("Windows")) {
+ // 0655 = -rwxr-xr-x
+ org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort((short) 0655);
+ org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort((short) 0655);
+ }
+
+
+ inputPath = "/user/hadoop/input"
+ outputPath = "/user/hadoop/output"
+ if (fsh.test(inputPath)) {
+ fsh.rmr(inputPath)
+ }
+ if (fsh.test(outputPath)) {
+ fsh.rmr(outputPath)
+ }
+
+ // copy using the streams directly (to be portable across envs)
+ inStream = cl.getResourceAsStream("data/nietzsche-chapter-1.txt")
+ org.apache.hadoop.io.IOUtils.copyBytes(inStream, fs.create(inputPath), cfg)
+ </script>
+ </script-tasklet>
+
+
+</beans:beans>
8 samples/wordcount-cron/hadoop.properties
View
@@ -0,0 +1,8 @@
+jar.path=
+
+wordcount.input.path=/user/hadoop/conf
+wordcount.output.path=/user/hadoop/output
+wordcount.jar.path=${jar.path}/hadoop-examples-1.0.0.jar
+
+hd.fs=hdfs://localhost:9000
+mapred.job.tracker=localhost:9001
65 samples/wordcount-cron/wordcount-withoutjob-context.xml
View
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
+ xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:p="http://www.springframework.org/schema/p"
+ xmlns:task="http://www.springframework.org/schema/task"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
+ http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
+ http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
+
+
+ <context:property-placeholder location="hadoop.properties" />
+