Permalink
Browse files

Initial attempt at a package for hadoop and hadoop-lzo

  • Loading branch information...
1 parent de78988 commit 94bcc02692972d3731f0c936b6b4184a15f3c149 @mamash mamash committed Oct 5, 2012
View
7 hadoop-lzo/DESCR
@@ -0,0 +1,7 @@
+Hadoop-LZO is a project to bring splittable LZO compression to Hadoop.
+LZO is an ideal compression format for Hadoop due to its combination
+of speed and compression size. However, LZO files are not natively
+splittable, meaning the parallelism that is the core of Hadoop is gone.
+This project re-enables that parallelism with LZO compressed files,
+and also comes with standard utilities (input/output streams, etc)
+for working with LZO files.
View
54 hadoop-lzo/Makefile
@@ -0,0 +1,54 @@
+# $NetBSD$
+#
+
+DISTNAME= hadoop-lzo
+PKGNAME= ${DISTNAME}-0.4.17${GIT_TAG}
+CATEGORIES= archivers
+
+MAINTAINER= filip@joyent.com
+HOMEPAGE= https://github.com/twitter/hadoop-lzo
+COMMENT= Refactored version of hadoop-gpl-compression with LZO
+LICENSE= mit
+
+GIT_REPOSITORIES= hadoop-lzo
+GIT_REPO.hadoop-lzo= git://github.com/twitter/hadoop-lzo.git
+
+PKG_DESTDIR_SUPPORT= user-destdir
+
+USE_LANGUAGES= c c++
+USE_LIBTOOL= yes
+
+BUILD_DEPENDS+= apache-ant-[0-9]*:../../devel/apache-ant
+
+MAKE_ENV+= JAVA_HOME=${PKG_JAVA_HOME} \
+ C_INCLUDE_PATH=${BUILDLINK_PREFIX.lzo}/include \
+ LIBRARY_PATH=${BUILDLINK_PREFIX.lzo}/lib
+
+.if defined(MACHINE_ARCH) && ${MACHINE_ARCH} == "x86_64"
+JAVA_ARCH= amd64
+.else
+JAVA_ARCH= i386
+.endif
+
+SUBST_CLASSES+= ldflags
+SUBST_STAGE.ldflags= pre-build
+SUBST_MESSAGE.ldflags= Fixing LDFLAGS
+SUBST_FILES.ldflags= build.xml
+SUBST_VARS.ldflags= LDFLAGS JAVA_ARCH
+
+HADOOP_HOME?= hadoop
+INSTALLATION_DIRS+= ${HADOOP_HOME}/${DISTNAME}
+
+do-build:
+ cd ${WRKSRC} && ${PKGSRC_SETENV} ${MAKE_ENV} ant package
+
+do-test:
+ cd ${WRKSRC} && ${PKGSRC_SETENV} ${MAKE_ENV} ant test
+
+do-install:
+ cd ${WRKSRC} && ${CP} -Rp build/${PKGNAME_NOREV}-SNAPSHOT/* ${DESTDIR}${PREFIX}/${HADOOP_HOME}/${DISTNAME}/
+
+.include "../../wip/mk/git-package.mk"
+.include "../../archivers/lzo/buildlink3.mk"
+.include "../../mk/java-vm.mk"
+.include "../../mk/bsd.pkg.mk"
View
154 hadoop-lzo/PLIST
@@ -0,0 +1,154 @@
+@comment $NetBSD$
+hadoop/hadoop-lzo/build.xml
+hadoop/hadoop-lzo/docs/api/allclasses-frame.html
+hadoop/hadoop-lzo/docs/api/allclasses-noframe.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/CChecksum.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/DChecksum.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/DistributedLzoIndexer.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/GPLNativeCodeLoader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzoCodec.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzoIndex.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzoIndexer.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzoInputFormatCommon.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzopCodec.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzopDecompressor.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzopInputStream.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/LzopOutputStream.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/CChecksum.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/DChecksum.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/DistributedLzoIndexer.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/GPLNativeCodeLoader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzoCodec.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzoIndex.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzoIndexer.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzoInputFormatCommon.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzopCodec.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzopDecompressor.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzopInputStream.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/class-use/LzopOutputStream.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/package-frame.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/package-summary.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/package-tree.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/compression/lzo/package-use.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/DeprecatedLzoLineRecordReader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/DeprecatedLzoTextInputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/class-use/DeprecatedLzoLineRecordReader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/class-use/DeprecatedLzoTextInputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/package-frame.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/package-summary.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/package-tree.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapred/package-use.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/LzoIndexOutputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/LzoIndexRecordWriter.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/LzoLineRecordReader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/LzoSplitInputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/LzoSplitRecordReader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/LzoTextInputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/class-use/LzoIndexOutputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/class-use/LzoIndexRecordWriter.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/class-use/LzoLineRecordReader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/class-use/LzoSplitInputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/class-use/LzoSplitRecordReader.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/class-use/LzoTextInputFormat.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/package-frame.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/package-summary.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/package-tree.html
+hadoop/hadoop-lzo/docs/api/com/hadoop/mapreduce/package-use.html
+hadoop/hadoop-lzo/docs/api/com/quicklz/QuickLZ.html
+hadoop/hadoop-lzo/docs/api/com/quicklz/class-use/QuickLZ.html
+hadoop/hadoop-lzo/docs/api/com/quicklz/package-frame.html
+hadoop/hadoop-lzo/docs/api/com/quicklz/package-summary.html
+hadoop/hadoop-lzo/docs/api/com/quicklz/package-tree.html
+hadoop/hadoop-lzo/docs/api/com/quicklz/package-use.html
+hadoop/hadoop-lzo/docs/api/constant-values.html
+hadoop/hadoop-lzo/docs/api/deprecated-list.html
+hadoop/hadoop-lzo/docs/api/help-doc.html
+hadoop/hadoop-lzo/docs/api/index-all.html
+hadoop/hadoop-lzo/docs/api/index.html
+hadoop/hadoop-lzo/docs/api/org/apache/hadoop/io/compress/LzoCodec.html
+hadoop/hadoop-lzo/docs/api/org/apache/hadoop/io/compress/class-use/LzoCodec.html
+hadoop/hadoop-lzo/docs/api/org/apache/hadoop/io/compress/package-frame.html
+hadoop/hadoop-lzo/docs/api/org/apache/hadoop/io/compress/package-summary.html
+hadoop/hadoop-lzo/docs/api/org/apache/hadoop/io/compress/package-tree.html
+hadoop/hadoop-lzo/docs/api/org/apache/hadoop/io/compress/package-use.html
+hadoop/hadoop-lzo/docs/api/overview-frame.html
+hadoop/hadoop-lzo/docs/api/overview-summary.html
+hadoop/hadoop-lzo/docs/api/overview-tree.html
+hadoop/hadoop-lzo/docs/api/package-list
+hadoop/hadoop-lzo/docs/api/resources/inherit.gif
+hadoop/hadoop-lzo/docs/api/stylesheet.css
+hadoop/hadoop-lzo/${PKGNAME}-SNAPSHOT.jar
+hadoop/hadoop-lzo/ivy.xml
+hadoop/hadoop-lzo/ivy/ivy-2.0.0-rc2.jar
+hadoop/hadoop-lzo/ivy/ivy-2.2.0.jar
+hadoop/hadoop-lzo/ivy/ivysettings.xml
+hadoop/hadoop-lzo/ivy/libraries.properties
+hadoop/hadoop-lzo/lib/commons-logging-1.0.4.jar
+hadoop/hadoop-lzo/lib/commons-logging-api-1.0.4.jar
+hadoop/hadoop-lzo/lib/junit-3.8.1.jar
+hadoop/hadoop-lzo/lib/native/${OPSYS}-x86-32/libgplcompression.la
+hadoop/hadoop-lzo/src/get_build_revision.sh
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/CChecksum.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/DChecksum.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/DistributedLzoIndexer.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/GPLNativeCodeLoader.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzoCodec.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzoCompressor.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzoDecompressor.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzoIndex.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzoIndexer.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzoInputFormatCommon.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzopCodec.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzopDecompressor.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzopInputStream.java
+hadoop/hadoop-lzo/src/java/com/hadoop/compression/lzo/LzopOutputStream.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapred/DeprecatedLzoLineRecordReader.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapred/DeprecatedLzoTextInputFormat.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapreduce/LzoIndexOutputFormat.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapreduce/LzoIndexRecordWriter.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapreduce/LzoLineRecordReader.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapreduce/LzoSplitInputFormat.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapreduce/LzoSplitRecordReader.java
+hadoop/hadoop-lzo/src/java/com/hadoop/mapreduce/LzoTextInputFormat.java
+hadoop/hadoop-lzo/src/java/com/quicklz/QuickLZ.java
+hadoop/hadoop-lzo/src/java/org/apache/hadoop/io/compress/LzoCodec.java
+hadoop/hadoop-lzo/src/native/.autom4te.cfg
+hadoop/hadoop-lzo/src/native/Makefile.am
+hadoop/hadoop-lzo/src/native/Makefile.in
+hadoop/hadoop-lzo/src/native/aclocal.m4
+hadoop/hadoop-lzo/src/native/bootstrap.sh
+hadoop/hadoop-lzo/src/native/config/config.guess
+hadoop/hadoop-lzo/src/native/config/config.sub
+hadoop/hadoop-lzo/src/native/config/depcomp
+hadoop/hadoop-lzo/src/native/config/install-sh
+hadoop/hadoop-lzo/src/native/config/ltmain.sh
+hadoop/hadoop-lzo/src/native/config/missing
+hadoop/hadoop-lzo/src/native/configure
+hadoop/hadoop-lzo/src/native/configure.ac
+hadoop/hadoop-lzo/src/native/configure.orig
+hadoop/hadoop-lzo/src/native/impl/config.h.in
+hadoop/hadoop-lzo/src/native/impl/gpl-compression.h
+hadoop/hadoop-lzo/src/native/impl/lzo/LzoCompressor.c
+hadoop/hadoop-lzo/src/native/impl/lzo/LzoDecompressor.c
+hadoop/hadoop-lzo/src/native/impl/lzo/lzo.h
+hadoop/hadoop-lzo/src/native/m4/compression_utils.m4
+hadoop/hadoop-lzo/src/native/m4/libtool.m4
+hadoop/hadoop-lzo/src/native/m4/ltoptions.m4
+hadoop/hadoop-lzo/src/native/m4/ltsugar.m4
+hadoop/hadoop-lzo/src/native/m4/ltversion.m4
+hadoop/hadoop-lzo/src/native/m4/lt~obsolete.m4
+hadoop/hadoop-lzo/src/native/packageNativeHadoop.sh
+hadoop/hadoop-lzo/src/test/com/hadoop/compression/lzo/TestLzoCodec.java
+hadoop/hadoop-lzo/src/test/com/hadoop/compression/lzo/TestLzoRandData.java
+hadoop/hadoop-lzo/src/test/com/hadoop/compression/lzo/TestLzopInputStream.java
+hadoop/hadoop-lzo/src/test/com/hadoop/compression/lzo/TestLzopOutputStream.java
+hadoop/hadoop-lzo/src/test/com/hadoop/mapreduce/TestLzoTextInputFormat.java
+hadoop/hadoop-lzo/src/test/data/0.txt
+hadoop/hadoop-lzo/src/test/data/0.txt.lzo
+hadoop/hadoop-lzo/src/test/data/100.txt
+hadoop/hadoop-lzo/src/test/data/100.txt.lzo
+hadoop/hadoop-lzo/src/test/data/1000.txt
+hadoop/hadoop-lzo/src/test/data/1000.txt.lzo
+hadoop/hadoop-lzo/src/test/data/100000.txt
+hadoop/hadoop-lzo/src/test/data/100000.txt.lzo
+hadoop/hadoop-lzo/src/test/data/issue20-lzop.txt
View
BIN hadoop-lzo/conftest
Binary file not shown.
View
5 hadoop-lzo/conftest.c
@@ -0,0 +1,5 @@
+/*
+ * NOTE(review): this looks like an autoconf conftest accidentally
+ * committed along with the "conftest" binary above -- it should
+ * probably be deleted from the package rather than kept.
+ */
+#define HAVE_LZO_LZO2A_H 1
+
+#include <stdio.h>	/* printf was an implicit declaration before */
+
+int main() {
+printf("hello world\n");
+return 0;	/* main previously fell off the end without a return */
+}
View
4 hadoop-lzo/distinfo
@@ -0,0 +1,4 @@
+$NetBSD$
+
+SHA1 (patch-build.xml) = 8480c623380e005077aa66f32de691712bf855b1
+SHA1 (patch-src_native_configure) = 1cebf2f3f87167d6644741f87c4e5f773b8f1303
View
36 hadoop-lzo/patches/patch-build.xml
@@ -0,0 +1,36 @@
+$NetBSD$
+
+Let pkgsrc control the native link flags and the target java arch:
+@LDFLAGS@ and @JAVA_ARCH@ are filled in at pre-build time by the
+"ldflags" SUBST class in the package Makefile.  Also use the portable
+"id -un" instead of whoami for the build-author stamp.
+NOTE(review): regenerate distinfo (make makepatchsum) after touching
+this file.
+
+--- build.xml.orig 2012-09-17 12:34:37.000000000 +0000
++++ build.xml
+@@ -120,10 +120,7 @@
+ <property name="ant_task_repo_url"
+ value="${mvnrepo}${tsk.org}${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
+
+- <!-- set -no-as-needed for gnu ld -->
+- <condition property="native.ldflags" value="" else="-Wl,--no-as-needed">
+- <os family="mac"/>
+- </condition>
++ <property name="native.ldflags" value="@LDFLAGS@"/>
+
+ <!-- the normal classpath -->
+ <path id="classpath">
+@@ -211,7 +208,9 @@
+ <format property="build_time" pattern="MM/dd/yyyy hh:mm aa" timezone="GMT"/>
+ </tstamp>
+ <exec executable="${src.dir}/get_build_revision.sh" outputproperty="build_revision" />
+- <exec executable="whoami" outputproperty="build_author"/>
++ <exec executable="id" outputproperty="build_author">
++ <arg value="-un"/>
++ </exec>
+ <exec executable="uname" outputproperty="build_os">
+ <arg value="-a"/>
+ </exec>
+@@ -273,7 +272,7 @@
+
+ <exec dir="${build.native}" executable="sh" failonerror="true">
+ <env key="OS_NAME" value="${os.name}"/>
+- <env key="OS_ARCH" value="${os.arch}"/>
++ <env key="OS_ARCH" value="@JAVA_ARCH@"/>
+ <env key="LDFLAGS" value="${native.ldflags}"/>
+ <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
+ <env key="NATIVE_SRCDIR" value="${native.src.dir}"/>
View
22 hadoop-lzo/patches/patch-src_native_configure
@@ -0,0 +1,22 @@
+$NetBSD$
+
+Extract the NEEDED liblzo2 soname with awk instead of a sed \W
+expression (presumably a GNU-sed-ism that fails elsewhere -- verify),
+and embed an rpath to the JVM server directory so the JNI library can
+locate libjvm at run time, not just at link time.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- src/native/configure.orig 2012-09-17 12:34:37.000000000 +0000
++++ src/native/configure
+@@ -11380,7 +11380,7 @@ else
+ if test ! -z "`which otool | grep -v 'no otool'`"; then
+ ac_cv_libname_lzo2=\"`otool -L conftest | grep lzo2 | sed -e 's/^ *//' -e 's/ .*//'`\";
+ elif test ! -z "`which objdump | grep -v 'no objdump'`"; then
+- ac_cv_libname_lzo2="`objdump -p conftest | grep NEEDED | grep lzo2 | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
++ ac_cv_libname_lzo2="`objdump -p conftest | grep NEEDED | grep lzo2 | awk '{print $2}' | sed 's/\(.*\)\W*$/\"\1\"/'`"
+ elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
+ ac_cv_libname_lzo2="`ldd conftest | grep lzo2 | sed 's/^[^A-Za-z0-9]*\([A-Za-z0-9\.]*\)[^A-Za-z0-9]*=>.*$/\"\1\"/'`"
+ else
+@@ -11931,7 +11931,7 @@ done
+ JNI_LDFLAGS=""
+ if test "x$JAVA_HOME" != "x"
+ then
+- JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
++ JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server -Wl,-R$JAVA_HOME/jre/lib/$OS_ARCH/server"
+ fi
+ ldflags_bak=$LDFLAGS
+ LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
View
0 hadoop/DESCR
No changes.
View
83 hadoop/Makefile
@@ -0,0 +1,83 @@
+# $NetBSD$
+#
+
+DISTNAME= hadoop-1.0.3
+CATEGORIES= devel
+MASTER_SITES= ${MASTER_SITE_APACHE:=hadoop/common/${DISTNAME}/}
+
+# Filled in the template TODO placeholders; maintainer matches the
+# sibling hadoop-lzo package from the same import.
+MAINTAINER= filip@joyent.com
+HOMEPAGE= http://hadoop.apache.org/
+COMMENT= Framework for reliable, scalable, distributed computing
+LICENSE= apache-2.0
+
+PKG_DESTDIR_SUPPORT= user-destdir
+
+USE_LANGUAGES= c c++
+USE_LIBTOOL= yes
+USE_TOOLS+= gmake autoconf automake pax
+
+# ant drives the java build; the native parts use autoconf + gmake.
+BUILD_DEPENDS+= apache-ant-[0-9]*:../../devel/apache-ant
+
+# Per-platform native library directory name produced by the hadoop
+# build; also substituted into the PLIST.
+.if ${MACHINE_ARCH} == "x86_64"
+HADOOP_ARCH= ${OPSYS}-amd64-64
+.else
+HADOOP_ARCH= ${OPSYS}-x86-32
+.endif
+PLIST_SUBST+= HADOOP_ARCH=${HADOOP_ARCH}
+
+HADOOP_HOME?= ${PREFIX}/hadoop
+
+CHECK_PORTABILITY_SKIP+= src/contrib/streaming/src/test/system/scripts/StreamMapper.sh
+REPLACE_BASH+= bin/hadoop bin/rcc bin/*.sh contrib/hdfsproxy/bin/* \
+ contrib/hod/bin/checknodes
+
+MAKE_ARGS= -Dbuild.classes=${WRKSRC}/hadoop-core-${PKGVERSION_NOREV}.jar \
+ -Dversion=${PKGVERSION_NOREV} -Dcompile.native=true \
+ -Dcompile.c++=true -Dmake.cmd=${GMAKE} -Dlibhdfs=1 \
+ -Dlibrecordio=true -Dskip.record-parser=true
+
+BUILD_TARGETS= compile-core-native compile-c++ compile-c++-libhdfs \
+ compile-c++-pipes compile-c++-utils
+
+# Use ${PKGVERSION_NOREV} for the examples jar (was a hard-coded 1.0.3,
+# inconsistent with the other jars) so updates stay in sync.
+DEFAULTS= src/core/core-default.xml src/hdfs/hdfs-default.xml \
+ src/mapred/mapred-default.xml hadoop-examples-${PKGVERSION_NOREV}.jar
+DIST= bin contrib hadoop-ant-${PKGVERSION_NOREV}.jar hadoop-core-${PKGVERSION_NOREV}.jar \
+ hadoop-test-${PKGVERSION_NOREV}.jar hadoop-tools-${PKGVERSION_NOREV}.jar lib webapps
+CONF= capacity-scheduler.xml configuration.xsl core-site.xml hadoop-env.sh hadoop-metrics2.properties \
+ hadoop-policy.xml hdfs-site.xml log4j.properties mapred-queue-acls.xml mapred-site.xml taskcontroller.cfg
+# Top-level documentation files.  DOC was referenced by the install
+# loop below but never defined, making that loop a silent no-op.
+DOC= CHANGES.txt LICENSE.txt NOTICE.txt README.txt
+
+.for f in ${CONF}
+CONF_FILES+= share/examples/hadoop/${f} ${PKG_SYSCONFDIR}/${f}
+.endfor
+
+# Append instead of overwriting so pkgsrc/buildlink CPPFLAGS survive.
+CPPFLAGS+= -D_POSIX_C_SOURCE=199506L -D__EXTENSIONS__
+
+# Prebuilt Linux native libs from the tarball would end up in the
+# package otherwise.
+post-extract:
+ ${RM} -rf ${WRKSRC}/lib/native/Linux-*
+
+do-build:
+ cd ${WRKSRC} && ${PKGSRC_SETENV} ${MAKE_ENV} ant ${BUILD_TARGETS} ${MAKE_ARGS}
+
+# FreeBSD-dist is added by patches/patch-build.xml; run it under the
+# same environment as do-build (it previously ran bare ant).
+post-build:
+ cd ${WRKSRC} && ${PKGSRC_SETENV} ${MAKE_ENV} ant FreeBSD-dist
+ (cd ${WRKSRC}/build/c++ && ${TAR} -cf - ${HADOOP_ARCH}/lib ${HADOOP_ARCH}/include) | \
+ (cd ${WRKSRC}/c++ && ${TAR} -xf -)
+
+do-install:
+ ${MKDIR} ${DESTDIR}${HADOOP_HOME}
+ cd ${WRKSRC} && ${PAX} -rw -p e ${DIST} ${DESTDIR}${HADOOP_HOME}/
+ cd ${WRKSRC}/c++/${HADOOP_ARCH} && ${PAX} -rw -p e include lib ${DESTDIR}${PREFIX}/
+ ${MKDIR} ${DESTDIR}${PREFIX}/share/examples/hadoop
+.for f in ${DEFAULTS}
+ ${INSTALL_DATA} ${WRKSRC}/${f} ${DESTDIR}${PREFIX}/share/examples/hadoop
+.endfor
+ cd ${WRKSRC}/conf && ${PAX} -rw -p e * ${DESTDIR}${PREFIX}/share/examples/hadoop
+ ${MKDIR} ${DESTDIR}${PREFIX}/share/doc/hadoop
+ cd ${WRKSRC} && ${PAX} -rw -p e docs ${DESTDIR}${PREFIX}/share/doc/hadoop
+.for f in ${DOC}
+ ${INSTALL_DATA} ${WRKSRC}/${f} ${DESTDIR}${PREFIX}/share/doc/hadoop
+.endfor
+
+.include "../../mk/java-vm.mk"
+.include "../../security/openssl/buildlink3.mk"
+.include "../../mk/bsd.pkg.mk"
View
2,934 hadoop/PLIST
2,934 additions, 0 deletions not shown because the diff is too large. Please use a local Git client to view these changes.
View
11 hadoop/distinfo
@@ -0,0 +1,11 @@
+$NetBSD$
+
+SHA1 (hadoop-1.0.3.tar.gz) = 5ca6b77e0a600475fae6770c52b47a751f646f9c
+RMD160 (hadoop-1.0.3.tar.gz) = e41421483156fd0fa65d608b206a17cd2a73a989
+Size (hadoop-1.0.3.tar.gz) = 62428860 bytes
+SHA1 (patch-build.xml) = 447a4b483242bc1daf9caadce65a7f1a23e3ff1a
+SHA1 (patch-src_c++_libhdfs_configure.ac) = 8824b342d83cf09bda8f87a9137e30585bf945da
+SHA1 (patch-src_c++_libhdfs_hdfsJniHelper.c) = 90b94d685a64c97ff2ae6bdd833e52e92ffe38b5
+SHA1 (patch-src_c++_pipes_impl_HadoopPipes.cc) = 47eaed78b7052c3e0a3d600ec9df8bc86ae1fce4
+SHA1 (patch-src_c++_task-controller_Makefile.in) = 2911c1dcb4af55e9345cfef4aac24eed245ab19e
+SHA1 (patch-src_native_configure.ac) = e97f2b4a0f75d8cf4e8690605d3cba61af3f2716
View
90 hadoop/patches/patch-build.xml
@@ -0,0 +1,90 @@
+$NetBSD$
+
+Build only what pkgsrc asks for: drop the "init" (and java class
+compile) dependencies from the native/C++/librecordio targets so they
+can be invoked individually; pass LIBS=-lcrypto to the pipes configure
+run (HadoopPipes.cc uses OpenSSL, see the companion patch); and add a
+FreeBSD-dist target, invoked from post-build, that assembles lib/ and
+packages the native libraries.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- build.xml.orig 2012-05-08 20:35:00.000000000 +0000
++++ build.xml
+@@ -443,7 +443,7 @@
+ <!-- ====================================================== -->
+ <!-- Compile the Java files -->
+ <!-- ====================================================== -->
+- <target name="record-parser" depends="init" if="javacc.home">
++ <target name="record-parser" if="javacc.home">
+ <javacc
+ target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
+ outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
+@@ -620,7 +620,7 @@
+ </exec>
+ </target>
+
+- <target name="compile-core-native" depends="create-native-configure, compile-core-classes"
++ <target name="compile-core-native" depends="create-native-configure"
+ if="compile.native">
+
+ <mkdir dir="${build.native}/lib"/>
+@@ -2019,7 +2019,7 @@
+ <!-- librecordio targets. -->
+ <!-- ================================================================== -->
+
+- <target name="compile-librecordio" depends="init" if="librecordio" >
++ <target name="compile-librecordio" if="librecordio" >
+ <mkdir dir="${build.librecordio}"/>
+ <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
+ <env key="XERCESCROOT" value="${xercescroot}"/>
+@@ -2053,7 +2053,7 @@
+ </chmod>
+ </target>
+
+- <target name="create-c++-configure" depends="init" if="compile.c++">
++ <target name="create-c++-configure" if="compile.c++">
+ <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+@@ -2076,7 +2076,7 @@
+ </exec>
+ </target>
+
+- <target name="check-c++-makefiles" depends="init" if="compile.c++">
++ <target name="check-c++-makefiles" if="compile.c++">
+ <condition property="need.c++.utils.makefile">
+ <not> <available file="${build.c++.utils}/Makefile"/> </not>
+ </condition>
+@@ -2097,7 +2097,7 @@
+ </condition>
+ </target>
+
+- <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
++ <target name="check-c++-makefile-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
+ <condition property="need.c++.libhdfs.makefile">
+ <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
+ </condition>
+@@ -2139,6 +2139,7 @@
+ <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/>
+ <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
+ failonerror="yes">
++ <env key="LIBS" value="-lcrypto"/>
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+@@ -2804,5 +2805,23 @@
+ <fileset file="${jsvc.install.dir}/jsvc.${os.arch}"/>
+ </chmod>
+ </target>
++ <target name="FreeBSD-dist" >
++ <mkdir dir="${dist.dir}"/>
++ <mkdir dir="${dist.dir}/lib"/>
++ <mkdir dir="${dist.dir}/contrib"/>
++ <mkdir dir="${dist.dir}/bin"/>
++
++ <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
++ <fileset dir="lib">
++ <exclude name="**/native/**"/>
++ </fileset>
++ </copy>
+
++ <exec dir="${basedir}" executable="sh" failonerror="true">
++ <env key="BASE_NATIVE_LIB_DIR" value="${basedir}/lib/native"/>
++ <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
++ <env key="DIST_LIB_DIR" value="${basedir}/lib/native"/>
++ <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
++ </exec>
++</target>
+ </project>
View
13 hadoop/patches/patch-src_c++_libhdfs_configure.ac
@@ -0,0 +1,13 @@
+$NetBSD$
+
+Drop -Wl,-x (discard local symbols) from the libhdfs link line --
+presumably unsupported or harmful with the target linker; verify.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- src/c++/libhdfs/configure.ac.orig 2012-05-08 20:34:52.000000000 +0000
++++ src/c++/libhdfs/configure.ac
+@@ -80,7 +80,7 @@ AP_JVM_LIBDIR()
+ if test "$supported_os" != "darwin"
+ then
+ CFLAGS="$CFLAGS -m${JVM_ARCH} -I$JAVA_HOME/include -I$JAVA_HOME/include/$supported_os"
+- LDFLAGS="$LDFLAGS -m${JVM_ARCH} -L$LIB_JVM_DIR -ljvm -shared -Wl,-x"
++ LDFLAGS="$LDFLAGS -m${JVM_ARCH} -L$LIB_JVM_DIR -ljvm -shared"
+ AC_MSG_RESULT([VALUE OF JVM_ARCH IS :$JVM_ARCH])
+ fi
+
View
15 hadoop/patches/patch-src_c++_libhdfs_hdfsJniHelper.c
@@ -0,0 +1,15 @@
+$NetBSD$
+
+<error.h> is a glibc-specific header that does not exist on SunOS;
+skip it there.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- src/c++/libhdfs/hdfsJniHelper.c.orig 2012-05-08 20:34:53.000000000 +0000
++++ src/c++/libhdfs/hdfsJniHelper.c
+@@ -15,7 +15,9 @@
+ */
+
+ #include <string.h>
+-#include <error.h>
++#if !defined(__sun)
++# include <error.h>
++#endif
+ #include "hdfsJniHelper.h"
+
+ static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER;
View
12 hadoop/patches/patch-src_c++_pipes_impl_HadoopPipes.cc
@@ -0,0 +1,12 @@
+$NetBSD$
+
+Include <unistd.h> explicitly -- presumably not pulled in
+transitively on all platforms/toolchains; verify which function
+needed it.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- src/c++/pipes/impl/HadoopPipes.cc.orig 2012-05-08 20:34:52.000000000 +0000
++++ src/c++/pipes/impl/HadoopPipes.cc
+@@ -34,6 +34,7 @@
+ #include <pthread.h>
+ #include <iostream>
+ #include <fstream>
++#include <unistd.h>
+
+ #include <openssl/hmac.h>
+ #include <openssl/buffer.h>
View
13 hadoop/patches/patch-src_c++_task-controller_Makefile.in
@@ -0,0 +1,13 @@
+$NetBSD$
+
+Do not promote warnings to errors (-Werror); other compilers/platforms
+emit warnings upstream did not anticipate.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- src/c++/task-controller/Makefile.in.orig 2012-05-08 20:34:52.000000000 +0000
++++ src/c++/task-controller/Makefile.in
+@@ -197,7 +197,7 @@ sbindir = @sbindir@
+ sharedstatedir = @sharedstatedir@
+ sysconfdir = @sysconfdir@
+ target_alias = @target_alias@
+-AM_CFLAGS = -I$(srcdir)/impl -Wall -g -Werror
++AM_CFLAGS = -I$(srcdir)/impl -Wall -g
+ TESTS = test-task-controller
+
+ # Define the sources for the common files
View
12 hadoop/patches/patch-src_native_configure.ac
@@ -0,0 +1,12 @@
+$NetBSD$
+
+Enable system extensions (_GNU_SOURCE, __EXTENSIONS__, ...) so that
+non-strict-POSIX declarations are visible -- presumably needed on
+SunOS together with the package's _POSIX_C_SOURCE setting; verify.
+NOTE(review): regenerate distinfo after touching this file.
+
+--- src/native/configure.ac.orig 2012-05-08 20:34:53.000000000 +0000
++++ src/native/configure.ac
+@@ -39,6 +39,7 @@ AC_CONFIG_SRCDIR([src/org_apache_hadoop.
+ AC_CONFIG_AUX_DIR(config)
+ AC_CONFIG_HEADER([config.h])
+ AC_SYS_LARGEFILE
++AC_USE_SYSTEM_EXTENSIONS
+
+ AM_INIT_AUTOMAKE(hadoop,1.0.0)
+

0 comments on commit 94bcc02

Please sign in to comment.