Permalink
Browse files

remove samples and update ref docs

  • Loading branch information...
1 parent d8649d6 commit 712f5aa980e5bbbb23b87ceb69128e190779ac39 mpollack committed Oct 5, 2012
Showing with 7 additions and 2,180 deletions.
  1. +2 −6 docs/src/reference/docbook/samples/introduction.xml
  2. +5 −0 samples/README.txt
  3. +0 −37 samples/batch-wordcount/build.gradle
  4. +0 −40 samples/batch-wordcount/readme.txt
  5. +0 −11 samples/batch-wordcount/src/main/resources/META-INF/spring/batch-common.xml
  6. +0 −13 samples/batch-wordcount/src/main/resources/META-INF/spring/hadoop-context.xml
  7. +0 −35 samples/batch-wordcount/src/main/resources/META-INF/spring/wordcount-context.xml
  8. +0 −9 samples/batch-wordcount/src/main/resources/batch.properties
  9. +0 −27 samples/batch-wordcount/src/main/resources/cp-data.js
  10. +0 −6 samples/batch-wordcount/src/main/resources/hadoop.properties
  11. +0 −18 samples/batch-wordcount/src/main/resources/launch-context.xml
  12. +0 −9 samples/batch-wordcount/src/main/resources/log4j.properties
  13. +0 −45 ...batch-wordcount/src/test/java/org/springframework/data/hadoop/samples/WordCountWorkflowTests.java
  14. +0 −12 samples/gradle.properties
  15. BIN samples/gradle/wrapper/gradle-wrapper.jar
  16. +0 −6 samples/gradle/wrapper/gradle-wrapper.properties
  17. +0 −164 samples/gradlew
  18. +0 −90 samples/gradlew.bat
  19. +0 −35 samples/hbase-crud/build.gradle
  20. +0 −37 samples/hbase-crud/readme.txt
  21. +0 −166 samples/hbase-crud/src/main/java/org/springframework/data/hadoop/samples/hbase/HBaseAction.java
  22. +0 −45 samples/hbase-crud/src/main/java/org/springframework/data/hadoop/samples/hbase/Main.java
  23. +0 −24 samples/hbase-crud/src/main/resources/META-INF/spring/context.xml
  24. +0 −10 samples/hbase-crud/src/main/resources/hadoop.properties
  25. +0 −8 samples/hbase-crud/src/main/resources/log4j.properties
  26. +0 −53 samples/pig-scripting/build.gradle
  27. +0 −38 samples/pig-scripting/readme.txt
  28. +0 −29 samples/pig-scripting/src/main/java/org/springframework/data/hadoop/samples/pig/Main.java
  29. +0 −1 samples/pig-scripting/src/main/resources/.gitignore
  30. +0 −11 samples/pig-scripting/src/main/resources/META-INF/spring/batch-common.xml
  31. +0 −43 samples/pig-scripting/src/main/resources/META-INF/spring/context.xml
  32. +0 −3 samples/pig-scripting/src/main/resources/core-site.xml
  33. +0 −27 samples/pig-scripting/src/main/resources/cp-data.js
  34. +0 −8 samples/pig-scripting/src/main/resources/hadoop.properties
  35. +0 −22 samples/pig-scripting/src/main/resources/log4j.properties
  36. +0 −6 samples/pig-scripting/src/main/resources/script.pig
  37. +0 −39 samples/readme.txt
  38. +0 −836 samples/resources/data/nietzsche-chapter-1.txt
  39. +0 −30 samples/wordcount/build.gradle
  40. +0 −38 samples/wordcount/readme.txt
  41. +0 −45 samples/wordcount/src/main/java/org/springframework/data/hadoop/samples/wordcount/Main.java
  42. +0 −33 samples/wordcount/src/main/resources/META-INF/spring/context.xml
  43. +0 −27 samples/wordcount/src/main/resources/cp-data.js
  44. +0 −6 samples/wordcount/src/main/resources/hadoop.properties
  45. +0 −8 samples/wordcount/src/main/resources/log4j.properties
  46. +0 −24 samples/wordcount/src/test/java/org/springframework/data/hadoop/samples/WordCountWorkflowTests.java
@@ -1,10 +1,6 @@
<partintro>
<title>Document structure</title>
- <para>This part of the reference documentation covers the sample applications included with Spring for Apache Hadoop that demonstrate features in a code centric manner. </para>
+ <para>The sample applications have been moved into their own repository so they can be developed independently of the Spring for Apache Hadoop release cycle. They can be found on github <ulink url="https://github.com/SpringSource/spring-hadoop-samples">https://github.com/SpringSource/spring-hadoop-samples</ulink>.</para>
- <para><xref linkend="wordcount"/> describes a standard Spring application that executes the wordcount map-reduce job</para>
-
- <para><xref linkend="batch-wordcount"/> describes a Batch application that executes the wordcount map-reduce job</para>
-
-</partintro>
+</partintro>
View
@@ -0,0 +1,5 @@
+The sample applications have been moved into their own repository so they can be developed independently of the Spring for Apache Hadoop release cycle.
+
+They can be found on github:
+
+https://github.com/SpringSource/spring-hadoop-samples
@@ -1,37 +0,0 @@
-description = 'Spring Hadoop Samples - Spring Batch WordCount'
-
-apply plugin: 'base'
-apply plugin: 'idea'
-apply plugin: 'java'
-apply plugin: 'eclipse' // `gradle eclipse` to generate .classpath/.project
-apply plugin: 'application'
-
-repositories {
- // Public Spring artefacts
- maven { url "http://repo.springsource.org/libs-snapshot" }
-}
-
-//load version from the root folder
-def props = new Properties();
-new File("../gradle.properties").withInputStream { props.load(it) }
-props.each({ project.ext[it.key] = it.value } )
-version = project.ext.version
-
-dependencies {
- compile "org.springframework.data:spring-data-hadoop:$version"
- compile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
- compile "org.springframework.batch:spring-batch-core:$springBatchVersion"
- // update the version that comes with Batch
- compile "org.springframework:spring-tx:$springVersion"
-
- testCompile "junit:junit:$junitVersion"
- testCompile "org.springframework:spring-test:$springVersion"
-}
-
-sourceSets { main { resources { srcDir file('../resources') } } }
-
-run {
- args 'classpath:/launch-context.xml', 'job1'
-}
-mainClassName = "org.springframework.batch.core.launch.support.CommandLineJobRunner"
-defaultTasks 'run'
@@ -1,40 +0,0 @@
-==========================
-== Batch WordCount Demo ==
-==========================
-
-1. MOTIVATION
-
-A basic word count demo that illustrates the configuration and interaction with Hadoop through Spring Hadoop
-and Spring Batch. The demo copies local resources into HDFS and executes the Hadoop example that counts words
-against it. The demo requires a running Hadoop instance (by default at localhost:9000).
-The Hadoop settings can be configured through hadoop.properties (more info in the Spring Hadoop reference docs).
-
-2. BUILD AND DEPLOYMENT
-
-This directory contains the source files.
-For building, JDK 1.6+ is required
-
-a) To build and run the sample as a JUnit test, use the following command:
-
-*nix/BSD OS:
-$ ../gradlew
-
-Windows OS:
-$ ..\gradlew
-
-If you have Gradle installed and available in your classpath, you can simply type:
-$ gradle
-
-This launches the Spring Batch CommandLineJobRunner and triggers the job.
-
-3. IDE IMPORT
-
-To import the code inside an IDE run the command
-
-For Eclipse
-$ ../gradlew eclipse
-
-For IDEA
-$ ../gradlew idea
-
-This will generate the IDE specific project files.
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:p="http://www.springframework.org/schema/p"
- xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
-
- <bean id="jobRepository" class="org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean"/>
- <bean id="transactionManager" class="org.springframework.batch.support.transaction.ResourcelessTransactionManager"/>
- <bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher" p:jobRepository-ref="jobRepository"/>
-</beans>
-
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
- xmlns:beans="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
- http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
-
- <configuration>
- <!-- The value after the question mark is the default value if another value for hd.fs is not provided -->
- fs.default.name=${hd.fs:hdfs://localhost:9000}
- </configuration>
-
-</beans:beans>
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
- xmlns:beans="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:batch="http://www.springframework.org/schema/batch"
- xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.1.xsd
- http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
- http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
-
- <batch:job id="job1">
- <batch:step id="import" next="wordcount">
- <batch:tasklet ref="script-tasklet"/>
- </batch:step>
-
- <batch:step id="wordcount">
- <batch:tasklet ref="wordcount-tasklet" />
- </batch:step>
- </batch:job>
-
- <job-tasklet id="wordcount-tasklet" job-ref="wordcount-job"/>
-
- <job id="wordcount-job" input-path="${wordcount.input.path:/user/gutenberg/input/word/}"
- output-path="${wordcount.output.path:/user/gutenberg/output/word/}"
- mapper="org.apache.hadoop.examples.WordCount.TokenizerMapper"
- reducer="org.apache.hadoop.examples.WordCount.IntSumReducer"
- validate-paths="false" />
-
- <script-tasklet id="script-tasklet">
- <script location="cp-data.js">
- <property name="inputPath" value="${wordcount.input.path}" />
- <property name="outputPath" value="${wordcount.output.path}" />
- <property name="localResource" value="${local.data}" />
- </script>
- </script-tasklet>
-
-</beans:beans>
@@ -1,9 +0,0 @@
-# Placeholders batch.*
-# for HSQLDB:
-batch.jdbc.driver=org.hsqldb.jdbcDriver
-batch.jdbc.url=jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true
-batch.jdbc.user=sa
-batch.jdbc.password=
-batch.schema=
-batch.schema.script=classpath:/org/springframework/batch/core/schema-hsqldb.sql
-
@@ -1,27 +0,0 @@
-//
-// Simple script that prepares the HDFS env for a Hadoop job:
-//
-// 1. deletes the input/output paths in HDFS (in case they exist)
-// 2. copies a local resource to HDFS
-//
-// required params are:
-// * inputPath
-// * outputPath
-// * localResource
-
-
-// 'hack' default permissions to make Hadoop work on Windows
-if (java.lang.System.getProperty("os.name").startsWith("Windows")) {
- // 0655 = -rwxr-xr-x
- org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_DIR_PERMISSION.fromShort(0655)
- org.apache.hadoop.mapreduce.JobSubmissionFiles.JOB_FILE_PERMISSION.fromShort(0655)
-}
-
-
-// delete job paths
-if (fsh.test(inputPath)) { fsh.rmr(inputPath) }
-if (fsh.test(outputPath)) { fsh.rmr(outputPath) }
-
-// copy local resource using the streams directly (to be portable across envs)
-inStream = cl.getResourceAsStream(localResource)
-org.apache.hadoop.io.IOUtils.copyBytes(inStream, fs.create(inputPath), cfg)
@@ -1,6 +0,0 @@
-wordcount.input.path=/user/gutenberg/input/word/
-wordcount.output.path=/user/gutenberg/output/word/
-local.data=data/nietzsche-chapter-1.txt
-
-hd.fs=hdfs://localhost:9000
-mapred.job.tracker=localhost:9001
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:context="http://www.springframework.org/schema/context"
- xsi:schemaLocation="
- http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
- http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd">
-
- <context:property-placeholder location="classpath:batch.properties,classpath:hadoop.properties"
- ignore-resource-not-found="true" ignore-unresolvable="true" />
-
- <context:component-scan base-package="org.springframework.data.hadoop.samples" />
-
- <import resource="classpath:/META-INF/spring/batch-common.xml" />
- <import resource="classpath:/META-INF/spring/hadoop-context.xml" />
- <import resource="classpath:/META-INF/spring/wordcount-context.xml" />
-
-</beans>
@@ -1,9 +0,0 @@
-log4j.rootCategory=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - <%m>%n
-
-log4j.logger.org.springframework.batch=WARN
-# for debugging datasource initialization
-# log4j.category.test.jdbc=DEBUG
@@ -1,45 +0,0 @@
-package org.springframework.data.hadoop.samples;
-
-import java.util.Map;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.batch.core.BatchStatus;
-import org.springframework.batch.core.Job;
-import org.springframework.batch.core.JobParameters;
-import org.springframework.batch.core.launch.JobLauncher;
-import org.springframework.beans.factory.BeanInitializationException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.ApplicationContext;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
-
-
-@RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration("/launch-context.xml")
-public class WordCountWorkflowTests {
-
- @Autowired
- private ApplicationContext ctx;
-
- @Test
- public void testWorkflowNS() throws Exception {
- startJobs(ctx);
- }
-
- public void startJobs(ApplicationContext ctx) {
- JobLauncher launcher = ctx.getBean(JobLauncher.class);
- Map<String, Job> jobs = ctx.getBeansOfType(Job.class);
-
- for (Map.Entry<String, Job> entry : jobs.entrySet()) {
- System.out.println("Executing job " + entry.getKey());
- try {
- if (launcher.run(entry.getValue(), new JobParameters()).getStatus().equals(BatchStatus.FAILED)){
- throw new BeanInitializationException("Failed executing job " + entry.getKey());
- }
- } catch (Exception ex) {
- throw new BeanInitializationException("Cannot execute job " + entry.getKey(), ex);
- }
- }
- }
-}
View
@@ -1,12 +0,0 @@
-junitVersion = 4.8.1
-springVersion = 3.0.7.RELEASE
-hadoopVersion = 1.0.3
-springBatchVersion = 2.1.8.RELEASE
-groovyVersion = 1.8.5
-jacksonVersion = 1.8.8
-hbaseVersion = 0.92.1
-pigVersion = 0.9.2
-antVersion = 1.8.3
-antlrVersion = 3.1
-
-version = 1.0.0.BUILD-SNAPSHOT
Binary file not shown.
@@ -1,6 +0,0 @@
-#Thu Oct 04 20:39:16 EEST 2012
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
-distributionUrl=http\://services.gradle.org/distributions/gradle-1.2-bin.zip
Oops, something went wrong.

0 comments on commit 712f5aa

Please sign in to comment.