Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

initial (non-working) project shell

  • Loading branch information...
commit 4e5cacfd2e01f330f56f6b25c6eeb15e377e3a43 1 parent 6156da1
mpollack authored
Showing with 1,153 additions and 0 deletions.
  1. +1 −0  .gradle/1.0-rc-2/taskArtifacts/cache.properties
  2. +1 −0  .gradle/1.0-rc-2/taskArtifacts/cache.properties.lock
  3. BIN  .gradle/1.0-rc-2/taskArtifacts/fileHashes.bin
  4. BIN  .gradle/1.0-rc-2/taskArtifacts/fileSnapshots.bin
  5. BIN  .gradle/1.0-rc-2/taskArtifacts/outputFileStates.bin
  6. BIN  .gradle/1.0-rc-2/taskArtifacts/taskArtifacts.bin
  7. +28 −0 build.gradle
  8. +10 −0 gradle.properties
  9. BIN  gradle/wrapper/gradle-wrapper.jar
  10. +6 −0 gradle/wrapper/gradle-wrapper.properties
  11. +164 −0 gradlew
  12. +90 −0 gradlew.bat
  13. +23 −0 plugin-hdfs/build.gradle
  14. +7 −0 plugin-hdfs/gradle.properties
  15. +41 −0 plugin-hdfs/src/main/java/org/springframework/data/hadoop/impala/hdfs/HdfsConfiguration.java
  16. +248 −0 plugin-hdfs/src/main/java/org/springframework/data/hadoop/impala/hdfs/commands/FsShellCommand.java
  17. +46 −0 plugin-hdfs/src/main/java/org/springframework/data/hadoop/impala/hdfs/commands/HdfsTargetCommand.java
  18. +75 −0 plugin-hdfs/src/main/java/org/springframework/data/hadoop/impala/hdfs/provider/HDFSPluginBannerProvider.java
  19. +43 −0 ...s/src/main/java/org/springframework/data/hadoop/impala/hdfs/provider/HDFSPluginHistoryFileNameProvider.java
  20. +45 −0 plugin-hdfs/src/main/java/org/springframework/data/hadoop/impala/hdfs/provider/HDFSPluginPromptProvider.java
  21. +13 −0 plugin-hdfs/src/main/resources/META-INF/spring/spring-shell-plugin.xml
  22. +87 −0 ...rc/test/java/org/springframework/datda/hadoop/impala/hdfs/provider/SpringHadoopAdminBannerProviderTest.java
  23. +65 −0 ...ava/org/springframework/datda/hadoop/impala/hdfs/provider/SpringHadoopAdminHistoryFileNameProviderTest.java
  24. +66 −0 ...rc/test/java/org/springframework/datda/hadoop/impala/hdfs/provider/SpringHadoopAdminPromptProviderTest.java
  25. +22 −0 plugin-mapreduce/build.gradle
  26. +7 −0 plugin-mapreduce/gradle.properties
  27. +48 −0 ...in-mapreduce/src/main/java/org/springframework/data/hadoop/impala/mapreduce/commands/MapReduceCommands.java
  28. +10 −0 plugin-mapreduce/src/main/resources/META-INF/spring/spring-shell-plugin.xml
  29. +7 −0 settings.gradle
View
1  .gradle/1.0-rc-2/taskArtifacts/cache.properties
@@ -0,0 +1 @@
+#Tue Jul 03 15:07:07 PDT 2012
View
1  .gradle/1.0-rc-2/taskArtifacts/cache.properties.lock
@@ -0,0 +1 @@
+
View
BIN  .gradle/1.0-rc-2/taskArtifacts/fileHashes.bin
Binary file not shown
View
BIN  .gradle/1.0-rc-2/taskArtifacts/fileSnapshots.bin
Binary file not shown
View
BIN  .gradle/1.0-rc-2/taskArtifacts/outputFileStates.bin
Binary file not shown
View
BIN  .gradle/1.0-rc-2/taskArtifacts/taskArtifacts.bin
Binary file not shown
View
28 build.gradle
@@ -0,0 +1,28 @@
+description = 'Impala'
+
+allprojects {
+ group = "org.springframework.data.hadoop"
+
+ apply plugin: 'base'
+ apply plugin: 'java'
+
+ repositories {
+ mavenCentral()
+ maven { url "http://repo.springsource.org/snapshot" }
+ maven { url "http://repo.springsource.org/release" }
+ maven { url "http://repo.springsource.org/milestone" }
+ maven { url "http://mvnrepository.com/artifact" }
+ }
+
+ dependencies {
+ compile "commons-configuration:commons-configuration:$commonsConfigVersion"
+ compile "org.codehaus.jackson:jackson-core-asl:$jacksonVersion"
+ compile "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion"
+ compile "org.apache.hadoop:hadoop-core:$hadoopVersion"
+
+ testCompile "junit:junit:$junitVersion"
+ testCompile "org.springframework:spring-test:$springVersion"
+ }
+}
+
+defaultTasks 'clean', 'build'
View
10 gradle.properties
@@ -0,0 +1,10 @@
+hadoopVersion = 1.0.0
+jacksonVersion = 1.9.6
+commonsConfigVersion = 1.8
+springBatchAdminVersion = 1.2.1.RELEASE
+springVersion = 3.1.1.RELEASE
+
+junitVersion = 4.8.1
+
+version = 1.0.0.BUILD-SNAPSHOT
+
View
BIN  gradle/wrapper/gradle-wrapper.jar
Binary file not shown
View
6 gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Wed Apr 25 17:59:38 CST 2012
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=http\://services.gradle.org/distributions/gradle-1.0-rc-2-bin.zip
View
164 gradlew
@@ -0,0 +1,164 @@
+#!/bin/bash
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+ echo "$*"
+}
+
+die ( ) {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched.
+if $cygwin ; then
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/"
+APP_HOME="`pwd -P`"
+cd "$SAVED"
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            # "maximum"/"max" means: raise the soft limit to the hard limit.
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        # FIX(review): message previously read "Could not query businessSystem
        # maximum ..." — stray token removed to match the stock Gradle wrapper.
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ JAVA_OPTS="$JAVA_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+ JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
View
90 gradlew.bat
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
View
23 plugin-hdfs/build.gradle
@@ -0,0 +1,23 @@
+
+description = 'Impala HDFS plugin'
+apply plugin: 'eclipse'
+
+repositories {
+ maven { url "http://spring-roo-repository.springsource.org/release" }
+}
+
+dependencies {
+ compile "org.springframework.shell:spring-shell:$springShellVersion"
+ compile "org.apache.hadoop:hadoop-core:$hadoopVersion"
+}
+
+
+
+eclipse {
+ project {
+ name = "plugin-hdfs"
+ }
+}
+
+
+defaultTasks 'clean', 'build'
View
7 plugin-hdfs/gradle.properties
@@ -0,0 +1,7 @@
+springVersion = 3.1.1.RELEASE
+springShellVersion = 1.0.0.BUILD-SNAPSHOT
+hadoopVersion = 1.0.3
+
+
+
+
View
41 plugin-hdfs/src/main/java/org/springframework/data/hadoop/impala/hdfs/HdfsConfiguration.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.impala.hdfs;
+
+import java.util.HashMap;
+import java.util.Map;
+
/**
 * Holds the HDFS connection settings shared by the plugin's shell commands.
 */
public class HdfsConfiguration {

    /** Property key under which the name node address is stored. */
    private static final String DFS_KEY = "dfs.default.name";

    /**
     * Address used when no name node has been configured.
     * NOTE(review): 50070 is usually the name node's HTTP/web UI port rather
     * than the filesystem RPC port (8020/9000) — confirm this default.
     */
    private static final String DEFAULT_DFS_NAME = "localhost:50070";

    /** Backing store for the configuration entries. */
    private Map<String, String> props = new HashMap<String, String>();

    /**
     * Records the HDFS name node address.
     *
     * @param dfsName name node URL, e.g. "hdfs://localhost:9000"
     */
    public void setDfsName(String dfsName) {
        props.put(DFS_KEY, dfsName);
    }

    /**
     * Returns the configured name node address, falling back to
     * {@link #DEFAULT_DFS_NAME} when the entry was never set.
     */
    public String getDfsName() {
        if (!props.containsKey(DFS_KEY)) {
            return DEFAULT_DFS_NAME;
        }
        return props.get(DFS_KEY);
    }
}
View
248 ...fs/src/main/java/org/springframework/data/hadoop/impala/hdfs/commands/FsShellCommand.java
@@ -0,0 +1,248 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.impala.hdfs.commands;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FsShell;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.hadoop.impala.hdfs.HdfsConfiguration;
+import org.springframework.roo.shell.CliAvailabilityIndicator;
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CliOption;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.roo.support.logging.HandlerUtils;
+import org.springframework.stereotype.Component;
+
+/**
+ * HDFS shell commands
+ *
+ * @author Jarred Li
+ *
+ */
+@Component
+public class FsShellCommand implements CommandMarker {
+
+ // Constants
+ private static Logger LOGGER = HandlerUtils.getLogger(FsShellCommand.class);
+
+ private FsShell shell;
+
+ @Autowired
+ private HdfsConfiguration hdfsConfiguration;
+
+ @CliAvailabilityIndicator({ "dfs"})
+ public boolean isCommandsAvailable() {
+ return isHDFSUrlSet();
+ }
+
+ //TODO - add back in functionality to read a property file of a well known name to set the default value.
+ // This can be handled using @Value in HdfsConfiguration
+
+ /**
+ * judge whether HDFS URL is set
+ *
+ * @return true - if HDFS URL is set
+ * false - otherwise
+ */
+ protected boolean isHDFSUrlSet() {
+ boolean result = true;
+ String dfsName = hdfsConfiguration.getDfsName();
+ if (dfsName == null || dfsName.length() == 0) {
+ result = false;
+ }
+ return result;
+ }
+
+ //TODO - these should be their own commands.
+
+ @CliCommand(value = "dfs", help = "run dfs commands")
+ public void runDfsCommands(
+ @CliOption(key = { "ls" }, mandatory = false, specifiedDefaultValue = ".", help = "directory to be listed") final String ls,
+ @CliOption(key = { "lsr" }, mandatory = false, specifiedDefaultValue = ".", help = "directory to be listed with recursion") final String lsr,
+ @CliOption(key = { "cat" }, mandatory = false, help = "file to be showed") final String cat,
+ @CliOption(key = { "chgrp" }, mandatory = false, help = "file to be changed group") final String chgrp,
+ @CliOption(key = { "chmod" }, mandatory = false, help = "file to be changed right") final String chmod,
+ @CliOption(key = { "chown" }, mandatory = false, help = "file to be changed owner") final String chown,
+ @CliOption(key = { "copyFromLocal" }, mandatory = false, help = "copy from local to HDFS") final String copyFromLocal,
+ @CliOption(key = { "copyToLocal" }, mandatory = false, help = "copy HDFS to local") final String copyToLocal,
+ @CliOption(key = { "count" }, mandatory = false, help = "file to be count") final String count,
+ @CliOption(key = { "cp" }, mandatory = false, help = "file to be copied") final String cp,
+ @CliOption(key = { "du" }, mandatory = false, help = "display sizes of file") final String du,
+ @CliOption(key = { "dus" }, mandatory = false, help = "display summary sizes of file") final String dus,
+ @CliOption(key = { "expunge" }, mandatory = false, help = "empty the trash") final String expunge,
+ @CliOption(key = { "get" }, mandatory = false, help = "copy to local") final String get,
+ @CliOption(key = { "getmerge" }, mandatory = false, help = "merge file") final String getmerge,
+ @CliOption(key = { "mkdir" }, mandatory = false, help = "create new directory") final String mkdir,
+ @CliOption(key = { "moveFromLocal" }, mandatory = false, help = "move local to HDFS") final String moveFromLocal,
+ @CliOption(key = { "moveToLocal" }, mandatory = false, help = "move local to HDFS") final String moveToLocal,
+ @CliOption(key = { "mv" }, mandatory = false, help = "move file from source to destination") final String mv,
+ @CliOption(key = { "put" }, mandatory = false, help = "copy from local to HDFS") final String put,
+ @CliOption(key = { "rm" }, mandatory = false, help = "remove file") final String rm,
+ @CliOption(key = { "rmr" }, mandatory = false, help = "remove file with recursion") final String rmr,
+ @CliOption(key = { "setrep" }, mandatory = false, help = "set replication number") final String setrep,
+ @CliOption(key = { "stat" }, mandatory = false, help = "return stat information") final String stat,
+ @CliOption(key = { "tail" }, mandatory = false, help = "tail the file") final String tail,
+ @CliOption(key = { "test" }, mandatory = false, help = "check a file") final String test,
+ @CliOption(key = { "text" }, mandatory = false, help = "output the file in text format") final String text,
+ @CliOption(key = { "touchz" }, mandatory = false, help = "create a file of zero lenth") final String touchz
+ ) {
+ try {
+ //TODO - should not recreate shell over and over again.
+ setupShell();
+ } catch (Exception e) {
+ LOGGER.warning("run HDFS shell failed" + e.getMessage() );
+ }
+
+ if (ls != null) {
+ runCommand("-ls",ls);
+ return;
+ }
+ else if (lsr != null) {
+ runCommand("-lsr",lsr);
+ return;
+ }
+ else if (cat != null) {
+ runCommand("-cat",cat);
+ return;
+ }
+ else if (chgrp != null) {
+ runCommand("-chgrp",chgrp);
+ return;
+ }
+ else if (chmod != null) {
+ runCommand("-chmod",chmod);
+ return;
+ }
+ else if (chown != null) {
+ runCommand("-chown",chown);
+ return;
+ }
+ else if (copyFromLocal != null) {
+ runCommand("-copyFromLocal",copyFromLocal);
+ return;
+ }
+ else if (copyToLocal != null) {
+ runCommand("-copyToLocal",copyToLocal);
+ return;
+ }
+ else if (count != null) {
+ runCommand("-count",count);
+ return;
+ }
+ else if (cp != null) {
+ runCommand("-cp",cp);
+ return;
+ }
+ else if (du != null) {
+ runCommand("-du",du);
+ return;
+ }
+ else if (dus != null) {
+ runCommand("-dus",dus);
+ return;
+ }
+ else if (expunge != null) {
+ runCommand("-expunge",expunge);
+ return;
+ }
+ else if (get != null) {
+ runCommand("-get",get);
+ return;
+ }
+ else if (getmerge != null) {
+ runCommand("-getmerge",getmerge);
+ return;
+ }
+ else if (mkdir != null) {
+ runCommand("-mkdir",mkdir);
+ return;
+ }
+ else if (moveFromLocal != null) {
+ runCommand("-moveFromLocal",moveFromLocal);
+ return;
+ }
+ else if (moveToLocal != null) {
+ runCommand("-moveToLocal",moveToLocal);
+ return;
+ }
+ else if (mv != null) {
+ runCommand("-mv",mv);
+ return;
+ }
+ else if (put != null) {
+ runCommand("-put",put);
+ return;
+ }
+ else if (rm != null) {
+ runCommand("-rm",rm);
+ return;
+ }
+ else if (rmr != null) {
+ runCommand("-rmr",rmr);
+ return;
+ }
+ else if (setrep != null) {
+ runCommand("-setrep",setrep);
+ return;
+ }
+ else if (stat != null) {
+ runCommand("-stat",stat);
+ return;
+ }
+ else if (tail != null) {
+ runCommand("-tail",tail);
+ return;
+ }
+ else if (test != null) {
+ runCommand("-test",test);
+ return;
+ }else if (text != null) {
+ runCommand("-text",text);
+ return;
+ }
+ else if (touchz != null) {
+ runCommand("-touchz",touchz);
+ return;
+ }
+ }
+
+ /**
+ * @param value
+ */
+ private void runCommand(String command, String value) {
+ List<String> argv = new ArrayList<String>();
+ argv.add(command);
+ String[] fileNames = value.split(" ");
+ argv.addAll(Arrays.asList(fileNames));
+ try {
+ shell.run(argv.toArray(new String[0]));
+ } catch (Exception e) {
+ LOGGER.warning("run HDFS shell failed. " + e.getMessage());
+ }
+ }
+
+ private void setupShell() throws Exception {
+ Configuration config = new Configuration();
+ config.setStrings("fs.default.name", hdfsConfiguration.getDfsName());
+ shell = new FsShell(config);
+ }
+
+}
View
46 ...src/main/java/org/springframework/data/hadoop/impala/hdfs/commands/HdfsTargetCommand.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.impala.hdfs.commands;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.hadoop.impala.hdfs.HdfsConfiguration;
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CliOption;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.stereotype.Component;
+
+/**
+ * Target command to set HDFS URL
+ *
+ * @author Jarred Li
+ *
+ */
+@Component
+public class HdfsTargetCommand implements CommandMarker {
+
+ @Autowired
+ private HdfsConfiguration hdfsConfiguration;
+
+ /**
+ * set HDFS URL
+ *
+ * @param url HDFS url, for example, "hdfs://localhost:9000".
+ */
+ @CliCommand(value = "hdfs target", help = "set HDFS URL")
+ public void dfsName(@CliOption(key = { "url" }, mandatory = true, help = "HDFS URL") final String url) {
+ hdfsConfiguration.setDfsName(url);
+ }
+}
View
75 ...n/java/org/springframework/data/hadoop/impala/hdfs/provider/HDFSPluginBannerProvider.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.impala.hdfs.provider;
+
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.roo.support.util.StringUtils;
+import org.springframework.shell.plugin.support.DefaultBannerProvider;
+import org.springframework.stereotype.Component;
+
+/**
+ * Banner Provider to customize Spring Shell Banner
+ *
+ * @author Jarred Li
+ *
+ */
+@Component
+@Order(Ordered.LOWEST_PRECEDENCE)
+public class HDFSPluginBannerProvider extends DefaultBannerProvider
+ implements CommandMarker {
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.BannerProvider#getBanner()
+ */
+ @CliCommand(value = { "version" }, help = "Displays current CLI version")
+ public String getBanner() {
+ StringBuffer buf = new StringBuffer();
+ buf.append("=======================================" + StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("* CLI Plugin for HDFS *" +StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("* *"+ StringUtils.LINE_SEPARATOR);
+ buf.append("=======================================" + StringUtils.LINE_SEPARATOR);
+ buf.append("Verson:" + this.getVersion());
+ return buf.toString();
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.BannerProvider#getVersion()
+ */
+ public String getVersion() {
+ return "1.0.0";
+ }
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.BannerProvider#getWelcomeMessage()
+ */
+ public String getWelcomeMessage() {
+ return "Welcome to HDFS CLI";
+ }
+
+ @Override
+ public String name() {
+ return "hdfs cli banner provider";
+ }
+
+
+}
View
43 ...g/springframework/data/hadoop/impala/hdfs/provider/HDFSPluginHistoryFileNameProvider.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.data.hadoop.impala.hdfs.provider;
+
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider;
+import org.springframework.stereotype.Component;
+
+/**
+ * history file name provider to customize Spring Shell's log file
+ *
+ * @author Jarred Li
+ *
+ */
+@Component
+@Order(Ordered.LOWEST_PRECEDENCE)
+public class HDFSPluginHistoryFileNameProvider extends DefaultHistoryFileNameProvider{
+
+ public String getHistoryFileName() {
+ return "hdfs-cli.log";
+ }
+
+ @Override
+ public String name() {
+ return "hdfs cli history file name provider";
+ }
+
+}
View
45 ...n/java/org/springframework/data/hadoop/impala/hdfs/provider/HDFSPluginPromptProvider.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.impala.hdfs.provider;
+
+import org.springframework.core.Ordered;
+import org.springframework.core.annotation.Order;
+import org.springframework.shell.plugin.support.DefaultPromptProvider;
+import org.springframework.stereotype.Component;
+
+/**
+ * Propmpt provider to customize Spring Shell's prompt text
+ *
+ * @author Jarred Li
+ *
+ */
+@Component
+@Order(Ordered.LOWEST_PRECEDENCE)
+public class HDFSPluginPromptProvider extends DefaultPromptProvider {
+
+ /* (non-Javadoc)
+ * @see org.springframework.shell.plugin.PromptProvider#getPromptText()
+ */
+ public String getPromptText() {
+ return "HDFS-CLI>";
+ }
+
+ @Override
+ public String name() {
+ return "hdfs cli prompt provider";
+ }
+
+}
View
13 plugin-hdfs/src/main/resources/META-INF/spring/spring-shell-plugin.xml
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xmlns:context="http://www.springframework.org/schema/context"
	xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
		http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd">

	<context:component-scan base-package="org.springframework.data.hadoop.impala.hdfs">
		<context:include-filter type="regex" expression="(commands|provider)\..*"/>
	</context:component-scan>

	<!-- FIX(review): previously referenced the non-existent package
	     org.springframework.data.hadoop.admin.cli.hdfs; HdfsConfiguration
	     actually lives in org.springframework.data.hadoop.impala.hdfs. -->
	<bean class="org.springframework.data.hadoop.impala.hdfs.HdfsConfiguration"/>
</beans>
View
87 ...pringframework/datda/hadoop/impala/hdfs/provider/SpringHadoopAdminBannerProviderTest.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.datda.hadoop.impala.hdfs.provider;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginBannerProvider;
+
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class SpringHadoopAdminBannerProviderTest{
+
+ private HDFSPluginBannerProvider bannerProvider;
+
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp() throws Exception {
+ bannerProvider = new HDFSPluginBannerProvider();
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @After
+ public void tearDown() throws Exception {
+ bannerProvider = null;
+ }
+
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginBannerProvider#getBanner()}.
+ */
+ @Test
+ public void testGetBanner() {
+ String banner = bannerProvider.getBanner();
+ Assert.assertNotNull(banner);
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginBannerProvider#getVersion()}.
+ */
+ @Test
+ public void testGetVersion() {
+ String version = bannerProvider.getVersion();
+ Assert.assertNotNull(version);
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginBannerProvider#getWelcomMessage()}.
+ */
+ @Test
+ public void testGetWelcomMessage() {
+ String msg = bannerProvider.getWelcomeMessage();
+ Assert.assertNotNull(msg);
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginBannerProvider#name()}.
+ */
+ @Test
+ public void testName() {
+ String name = bannerProvider.name();
+ Assert.assertNotNull(name);
+ }
+
+}
View
65 ...ework/datda/hadoop/impala/hdfs/provider/SpringHadoopAdminHistoryFileNameProviderTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.datda.hadoop.impala.hdfs.provider;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginHistoryFileNameProvider;
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class SpringHadoopAdminHistoryFileNameProviderTest {
+
+ private HDFSPluginHistoryFileNameProvider fileNameProvider;
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp() throws Exception {
+ fileNameProvider = new HDFSPluginHistoryFileNameProvider();
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @After
+ public void tearDown() throws Exception {
+ fileNameProvider = null;
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginHistoryFileNameProvider#getHistoryFileName()}.
+ */
+ @Test
+ public void testGetHistoryFileName() {
+ String fileName = fileNameProvider.getHistoryFileName();
+ Assert.assertNotNull(fileName);
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginHistoryFileNameProvider#name()}.
+ */
+ @Test
+ public void testName() {
+ String name = fileNameProvider.name();
+ Assert.assertNotNull(name);
+ }
+
+}
View
66 ...pringframework/datda/hadoop/impala/hdfs/provider/SpringHadoopAdminPromptProviderTest.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.datda.hadoop.impala.hdfs.provider;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginPromptProvider;
+
+/**
+ * @author Jarred Li
+ *
+ */
+public class SpringHadoopAdminPromptProviderTest {
+
+ private HDFSPluginPromptProvider provider;
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp() throws Exception {
+ provider = new HDFSPluginPromptProvider();
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @After
+ public void tearDown() throws Exception {
+ provider = null;
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginPromptProvider#getPromptText()}.
+ */
+ @Test
+ public void testGetPromptText() {
+ String prompt = provider.getPromptText();
+ Assert.assertNotNull(prompt);
+ }
+
+ /**
+ * Test method for {@link org.springframework.data.hadoop.impala.hdfs.provider.HDFSPluginPromptProvider#name()}.
+ */
+ @Test
+ public void testName() {
+ String name = provider.name();
+ Assert.assertNotNull(name);
+ }
+
+}
View
22 plugin-mapreduce/build.gradle
@@ -0,0 +1,22 @@
+
// Build script for the Impala MapReduce shell plugin.
description = 'Impala MapReduce plugin'

apply plugin: 'eclipse'

// spring-shell artifacts are published to the Spring Roo repository.
repositories {
	maven { url 'http://spring-roo-repository.springsource.org/release' }
}

// Versions come from this module's gradle.properties.
dependencies {
	compile "org.springframework.shell:spring-shell:$springShellVersion"
	compile "org.apache.hadoop:hadoop-core:$hadoopVersion"
}

// Keep the Eclipse project name aligned with the Gradle module name.
eclipse.project.name = 'plugin-mapreduce'

defaultTasks 'clean', 'build'
View
7 plugin-mapreduce/gradle.properties
@@ -0,0 +1,7 @@
# Dependency versions consumed by plugin-mapreduce/build.gradle.
springVersion = 3.1.1.RELEASE
# BUILD-SNAPSHOT: spring-shell 1.0.0 had no GA release at this point.
springShellVersion = 1.0.0.BUILD-SNAPSHOT
hadoopVersion = 1.0.3




View
48 ...ain/java/org/springframework/data/hadoop/impala/mapreduce/commands/MapReduceCommands.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2011-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.hadoop.impala.mapreduce.commands;
+
+import org.springframework.roo.shell.CliCommand;
+import org.springframework.roo.shell.CommandMarker;
+import org.springframework.stereotype.Component;
+
/**
 * Commands to submit and interact with MapReduce jobs
 *
 * <p>Both command methods are empty placeholders from the initial project
 * shell; no job submission or interaction logic is implemented yet.
 *
 * <p>NOTE(review): {@code CliCommand}/{@code CommandMarker} are imported from
 * {@code org.springframework.roo.shell}, while the build depends on
 * spring-shell (which ships its own copies of these annotations) — confirm the
 * intended package before implementing.
 *
 * @author Jarred Li
 *
 */
@Component
public class MapReduceCommands implements CommandMarker {

	/**
	 * Runs a jar file. Users can bundle their Map Reduce code in a jar file and execute it using this command.
	 *
	 */
	@CliCommand(value = "mr jar", help = "Runs a jar file. Users can bundle their Map Reduce code in a jar file and execute it using this command.")
	public void runMapReduceJar(
	) {
		// TODO: not implemented — placeholder from the initial project shell.
	}

	/**
	 * Interacts with running/finished MapReduce jobs; intended to mirror the
	 * {@code hadoop job} command line (see the Usage comment below for the
	 * options still to be mapped to shell parameters).
	 */
	@CliCommand(value = "mr job", help = "Command to interact with Map Reduce Jobs.")
	public void interactWithJobs(
	//Usage: hadoop job [GENERIC_OPTIONS] [-submit <job-file>] | [-status <job-id>] | [-counter <job-id> <group-name> <counter-name>] | [-kill <job-id>] | [-events <job-id> <from-event-#> <#-of-events>] | [-history [all] <jobOutputDir>] | [-list [all]] | [-kill-task <task-id>] | [-fail-task <task-id>] | [-set-priority <job-id> <priority>]
	) {
		// TODO: not implemented — placeholder from the initial project shell.
	}

}
View
10 plugin-mapreduce/src/main/resources/META-INF/spring/spring-shell-plugin.xml
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xmlns:context="http://www.springframework.org/schema/context"
	xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
		http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd">

	<!-- Register the plugin's @Component-annotated shell commands
	     (e.g. MapReduceCommands) so spring-shell picks them up. -->
	<context:component-scan base-package="org.springframework.data.hadoop.impala.mapreduce.commands"/>

</beans>
View
7 settings.gradle
@@ -0,0 +1,7 @@
// Multi-project build: the Impala root plus one module per shell plugin.
rootProject.name = 'impala'

include 'plugin-hdfs', 'plugin-mapreduce'
Please sign in to comment.
Something went wrong with that request. Please try again.