// build.gradle — Spring for Apache Hadoop
// Build-time classpath: pulls in the docbook plugin used to render the
// reference documentation.
buildscript {
    repositories {
        // HTTPS instead of plain HTTP: unencrypted artifact resolution is
        // vulnerable to man-in-the-middle dependency substitution.
        maven { url 'https://repo.springsource.org/plugins-release' }
    }
    dependencies {
        classpath 'org.springframework.build.gradle:docbook-reference-plugin:0.1.5'
    }
}
// Project coordinates used when publishing (group + description; the
// artifact id presumably comes from the project name — not visible here).
description = 'Spring for Apache Hadoop'
group = 'org.springframework.data'
// Dependency resolution repository for the project itself.
repositories {
    // HTTPS instead of plain HTTP: unencrypted artifact resolution is
    // vulnerable to man-in-the-middle dependency substitution.
    maven { url "https://repo.springsource.org/libs-snapshot" }
}
// Plugins. Quote style normalized to single quotes for consistency
// (double quotes kept only where GString interpolation is needed).
apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'idea'
apply from: "$rootDir/maven.gradle"   // needs double quotes for $rootDir
apply plugin: 'docbook-reference'
// Common dependencies
// Compile/test dependencies. Several integrations (Hadoop, Hive, Pig, HBase,
// Batch, Integration, Cascading) are declared 'optional' so downstream users
// only pull in what they actually use. Versions come from project properties
// (presumably gradle.properties — not visible in this file).
dependencies {
// Hadoop
compile("org.apache.hadoop:hadoop-core:$hadoopVersion", optional)
compile("org.apache.hadoop:hadoop-streaming:$hadoopVersion", optional)
compile("org.apache.hadoop:hadoop-tools:$hadoopVersion", optional)
// Logging
compile "org.slf4j:slf4j-api:$slf4jVersion"
compile ("org.slf4j:jcl-over-slf4j:$slf4jVersion", optional)
testRuntime "log4j:log4j:$log4jVersion"
testRuntime "org.slf4j:slf4j-log4j12:$slf4jVersion"
// Spring Framework
// commons-logging is excluded in favor of the jcl-over-slf4j bridge above.
compile("org.springframework:spring-core:$springVersion") {
exclude module: "commons-logging"
}
compile "org.springframework:spring-context-support:$springVersion"
compile("org.springframework:spring-tx:$springVersion", optional)
compile("org.springframework:spring-aop:$springVersion", optional)
compile("org.springframework:spring-jdbc:$springVersion", optional)
compile("org.springframework.batch:spring-batch-core:$springBatchVersion", optional)
compile("org.springframework.integration:spring-integration-stream:$springIntVersion", optional)
compile("org.springframework.integration:spring-integration-file:$springIntVersion", optional)
// cascading
compile("cascading:cascading-hadoop:$cascadingVersion", optional)
// Missing dependency in Hadoop 1.0.3
testRuntime "commons-io:commons-io:$commonsioVersion"
//testRuntime "org.codehaus.jackson:jackson-core-asl:$jacksonVersion"
testRuntime "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion"
testRuntime "cglib:cglib:$cglibVersion"
// Hive
compile("org.apache.hive:hive-metastore:$hiveVersion", optional)
compile("org.apache.hive:hive-service:$hiveVersion", optional)
testRuntime "org.apache.hive:hive-common:$hiveVersion"
testRuntime "org.apache.hive:hive-jdbc:$hiveVersion"
testRuntime "org.apache.hive:hive-shims:$hiveVersion"
testRuntime "org.apache.hive:hive-serde:$hiveVersion"
testRuntime "org.apache.thrift:libthrift:$thriftVersion"
testRuntime "org.apache.thrift:libfb303:$thriftVersion"
// Pig
compile("org.apache.pig:pig:$pigVersion", optional)
// HBase
// Closure form needed here to combine 'optional' with a module exclusion.
compile("org.apache.hbase:hbase:$hbaseVersion") { dep ->
optional dep
exclude module: "thrift"
}
// Libs dependencies (specified to cope with incompatibilities between them)
// testRuntime "org.antlr:antlr:$antlrVersion"
// testRuntime "org.antlr:antlr-runtime:$antlrVersion"
// Testing
testCompile "junit:junit:$junitVersion"
testCompile "org.mockito:mockito-core:$mockitoVersion"
testCompile "org.springframework:spring-test:$springVersion"
testCompile("javax.annotation:jsr250-api:1.0", optional)
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
testRuntime "org.springframework.integration:spring-integration-event:$springIntVersion"
testRuntime "org.codehaus.groovy:groovy:$groovyVersion"
testRuntime "org.jruby:jruby:$jrubyVersion"
testRuntime "org.python:jython-standalone:$jythonVersion"
testRuntime "org.apache.hive:hive-builtins:$hiveVersion"
// specify a version of antlr that works with both hive and pig
testRuntime "org.antlr:antlr-runtime:$antlrVersion"
testCompile "cascading:cascading-local:$cascadingVersion"
}
// Compile for Java 6.
sourceCompatibility = 1.6
targetCompatibility = 1.6

// The heavyweight integration suites are skipped by default;
// the enable*Tests tasks below flip these flags to false.
ext {
    skipPig = true
    skipHive = true
    skipHBase = true
    skipWebHdfs = true
}
// Opt-in switch for the Pig integration tests.
task enablePigTests {
    group = "Verification"
    description = "Enable Pig tests"
    doLast {
        project.ext.skipPig = false
    }
}
// Opt-in switch for the Hive integration tests.
task enableHiveTests {
    group = "Verification"
    description = "Enable Hive tests"
    doLast {
        project.ext.skipHive = false
    }
}
// Opt-in switch for the HBase integration tests.
task enableHBaseTests {
    group = "Verification"
    description = "Enable HBase tests"
    doLast {
        project.ext.skipHBase = false
    }
}
// Opt-in switch for the WebHdfs integration tests.
task enableWebHdfsTests {
    group = "Verification"
    description = "Enable WebHdfs tests"
    doLast {
        project.ext.skipWebHdfs = false
    }
}
// Convenience switch: turns on every optional test suite at once.
task enableAllTests() {
    group = "Verification"
    description = "Enable all (incl. Pig, Hive, HBase, WebHdfs) tests"
    doFirst {
        println "Enable all tests"
        ['skipPig', 'skipHBase', 'skipHive', 'skipWebHdfs'].each { flag ->
            project.ext[flag] = false
        }
    }
}
test {
    //forkEvery = 1
    // Input/output locations consumed by the file-system based tests.
    systemProperties['input.path'] = 'build/classes/test/input'
    systemProperties['output.path'] = 'build/classes/test/output'
    includes = ["**/*.class"]
    doFirst() {
        // Collect the names of the suites being skipped so the message is
        // cleanly space-separated. The previous string concatenation left a
        // trailing space after every name except "WebHdfs" (which had none),
        // producing output like "Skipping [Pig Hive ] Tests".
        def skipped = []
        if (skipPig) {
            skipped << "Pig"
            excludes.add("**/pig/**")
        }
        if (skipHBase) {
            skipped << "HBase"
            excludes.add("**/hbase/**")
        }
        if (skipHive) {
            skipped << "Hive"
            excludes.add("**/hive/**")
        }
        if (skipWebHdfs) {
            skipped << "WebHdfs"
            excludes.add("**/WebHdfs*")
        }
        if (!skipped.isEmpty())
            println "Skipping [${skipped.join(' ')}] Tests"
    }
}
// Javadoc configuration: custom stylesheet/overview from docs/src/api,
// cross-links to the javadoc of the major dependencies, and the internal
// config package excluded from the published API.
javadoc {
ext.srcDir = file("${projectDir}/docs/src/api")
configure(options) {
stylesheetFile = file("${srcDir}/spring-javadoc.css")
overview = "${srcDir}/overview.html"
docFilesSubDirs = true
outputLevel = org.gradle.external.javadoc.JavadocOutputLevel.QUIET
breakIterator = true
author = true
showFromProtected()
// groups = [
// 'Spring Data Hadoop' : ['org.springframework.data.hadoop*'],
// ]
// External javadoc sites used to resolve {@link} references.
links = [
"http://static.springframework.org/spring/docs/3.0.x/javadoc-api",
"http://download.oracle.com/javase/6/docs/api",
"http://logging.apache.org/log4j/1.2/apidocs/",
"http://hadoop.apache.org/common/docs/current/api/",
"http://hbase.apache.org/apidocs/",
"http://pig.apache.org/docs/r0.9.2/api/",
"http://hive.apache.org/docs/r0.8.1/api/",
"http://static.springsource.org/spring-batch/apidocs/",
"http://static.springsource.org/spring-integration/api/",
"https://builds.apache.org/job/Thrift/javadoc/",
"http://jakarta.apache.org/commons/logging/apidocs/",
"http://docs.cascading.org/cascading/2.0/javadoc/"
]
// Namespace-config internals are not part of the public API.
exclude "org/springframework/data/hadoop/config/**"
}
title = "${rootProject.description} ${version} API"
}
jar {
    // Stamp build metadata into the manifest. SHDP.BUILD / SHDP.REV come from
    // the environment; the "" + prefix coerces a missing value to "null"
    // rather than failing.
    def manifestEntries = [
        'Implementation-Title'  : 'spring-data-hadoop',
        'Implementation-Version': project.version,
        'Build'                 : "" + System.env['SHDP.BUILD'],
        'Repository-Revision'   : "" + System.env['SHDP.REV']
    ]
    manifestEntries.each { key, value -> manifest.attributes[key] = value }
    // Ship license/notice under META-INF with year and version expanded.
    from("$rootDir/docs/src/info") {
        include "license.txt"
        include "notice.txt"
        into "META-INF"
        expand(copyright: new Date().format('yyyy'), version: project.version)
    }
}
// Companion jar containing the main Java sources.
task sourcesJar(type: Jar, dependsOn: classes) {
    from sourceSets.main.allJava
    classifier = 'sources'
}
// Companion jar containing the generated javadoc.
task javadocJar(type: Jar) {
    from javadoc   // copying from the task output also wires the dependency
    classifier = 'javadoc'
}
// docbook-reference plugin input: docbook sources for the reference manual.
reference {
sourceDir = file('docs/src/reference/docbook')
}
// Documentation bundle: changelog plus the generated api and reference docs.
task docsZip(type: Zip) {
    group = 'Distribution'
    classifier = 'docs'
    description = "Builds -${classifier} archive containing api and reference for deployment"

    from('docs/src/info') { include 'changelog.txt' }
    from(javadoc) { into 'api' }
    from(reference) { into 'reference' }
}
// XSD bundle: reads META-INF/spring.schemas (URL -> classpath-resource map),
// locates each XSD in the main resources, and packages it under a short
// directory name derived from the schema URL.
task schemaZip(type: Zip) {
group = 'Distribution'
classifier = 'schema'
description = "Builds -${classifier} archive containing all XSDs for deployment"
def Properties schemas = new Properties();
// Load the spring.schemas mapping if present (safe-navigation: the build
// does not fail when the file is missing, schemas just stays empty).
sourceSets.main.resources.find {
it.path.endsWith('META-INF' + File.separator + 'spring.schemas')
}?.withInputStream { schemas.load(it) }
for (def key : schemas.keySet()) {
// e.g. http://.../schema/hadoop/spring-hadoop.xsd -> shortName "hadoop",
// alias "spring-hadoop.xsd" (the versioned file is renamed to the alias).
def shortName = key.replaceAll(/http.*schema.(.*).spring-.*/, '$1')
def alias = key.replaceAll(/http.*schema.(.*).(spring-.*)/, '$2')
// If the regex did not match, shortName == key: fail fast.
assert shortName != key
File xsdFile = sourceSets.main.resources.find {
it.path.replace('\\', '/').endsWith(schemas.get(key))
}
assert xsdFile != null
into (shortName) {
from xsdFile.path
rename { String fileName -> alias }
}
}
}
// Community-download distribution: readme/license/notice, samples (minus
// build/IDE residue), the docs and schema zips expanded, and the built jars.
task distZip(type: Zip, dependsOn: [jar, docsZip, schemaZip, sourcesJar, javadocJar]) {
group = 'Distribution'
classifier = 'dist'
description = "Builds -${classifier} archive, containing all jars and docs, " +
"suitable for community download page."
// Everything lives under a name-version root directory inside the zip.
ext.zipRootDir = "${project.name}-${project.version}"
into (zipRootDir) {
from('docs/src/info') {
include 'readme.txt'
include 'license.txt'
include 'notice.txt'
expand(copyright: new Date().format('yyyy'), version: project.version)
}
from('samples/') {
into 'samples'
exclude '**/build/**'
exclude '**/bin/**'
exclude '**/.settings/**'
exclude '**/.gradle/**'
exclude '**/.*'
}
// Re-expand the already-built doc/schema archives into the dist layout.
from(zipTree(docsZip.archivePath)) {
into "docs"
}
from(zipTree(schemaZip.archivePath)) {
into "schema"
}
into ("dist") {
// NOTE(review): collect{} on a single Project looks odd — presumably
// intended to gather libsDir across (sub)projects; verify this actually
// yields more than the root project's libs directory.
from rootProject.collect { project -> project.libsDir }
}
}
}
// Publish every secondary archive alongside the main jar.
artifacts {
    [sourcesJar, javadocJar, docsZip, schemaZip, distZip].each { archives it }
}
// Regenerates gradlew / gradlew.bat pinned to Gradle 1.0.
task wrapper(type: Wrapper) {
    gradleVersion = '1.0'
    description = 'Generates gradlew[.bat] scripts'
}
// Note: '=' REPLACES assemble's dependency set (it does not append),
// so assemble builds exactly jar and sourcesJar.
assemble.dependsOn = ['jar', 'sourcesJar']
defaultTasks 'build'