Permalink
Switch branches/tags
v206 v192 release-16.03-start last-glibc-2.13 black@2016-05-13 binary backups/0.12-release@15293 backups/0.11-release@9315 backups/0.10-release@6725 backups/0.9-release@4651 backups/0.8-release@2530 backups/0.7-release@2398 backups/0.6-release@1775 backups/0.5.1-release@996 backups/0.5-stable@34171 backups/0.5-release@989 backups/xorg-7.5@18179 backups/x86_64-darwin@34171 backups/x-updates@26704 backups/x-updates@22736 backups/usability@34170 backups/udev-173@28837 backups/stdenv-updates@34093 backups/stdenv-updates@32824 backups/stdenv-updates@19858 backups/stdenv-updates@18281 backups/stdenv-updates@15332 backups/stdenv-updates@12144 backups/stdenv-updates@10965 backups/stdenv-updates2@18282 backups/stdenv-updates2@18273 backups/stdenv-updates-merge@10849 backups/stdenv-bootstrap-20100825@23426 backups/stdenv-ada@26758 backups/pure-python@34174 backups/parallel-building-merger@34171 backups/one-click@2549 backups/nixos-pkgs@34170 backups/multitask-builds@34175 backups/multiple-outputs-sandbox@34172 backups/modular-python@26697 backups/master@10848 backups/master@59 backups/mass-update-01@31456 backups/martin@828 backups/martin2@34171 backups/logistics@34171 backups/libpng15@32782 backups/kmod-no-lib-modules@34172 backups/kmod-MODULE_DIR@33576 backups/kernel-config@19023 backups/kde-4.7@34170 backups/glib-2.30@32938 backups/glib-2.30-take2@33502 backups/freebsd-losser@34171 backups/drop-kde4.5@30929 backups/darwin-without-xcode@34172 backups/darwin-updates@34176 backups/cve-2010-3856@34170 backups/armv5tel-linux@18007 18.03 18.03-beta 17.09 17.09-beta 17.03 17.03-beta 16.09 16.09-beta 16.03 16.03-beta 15.09 15.09-beta 0.14 0.13 0.12 0.11 0.10 0.9 0.8 0.7 0.6 0.5.1 0.5 0.4 0.3 0.2 0.1
Nothing to show
Find file Copy path
Fetching contributors…
Cannot retrieve contributors at this time
156 lines (144 sloc) 7.61 KB
{ stdenv, fetchurl, makeWrapper, pkgconfig, which, maven, cmake, jre, bash, coreutils, glibc, protobuf2_5, fuse, snappy, zlib, bzip2, openssl }:
let
# Build one Hadoop release line from source.
#
# Arguments:
#   version             : upstream release, e.g. "2.7.6"
#   sha256              : hash of the hadoop-${version}-src.tar.gz source tarball
#   dependencies-sha256 : fixed-output hash of the maven repository produced by
#                         the fake first build (see fetched-maven-deps below);
#                         must be regenerated whenever version/sha256 change
#   tomcat              : tomcat tarball to pre-seed into the build so maven
#                         does not download it, or null when not needed
common = { version, sha256, dependencies-sha256, tomcat }:
  let
    # compile the hadoop tarball from sources, it requires some patches
    binaryDistribution = stdenv.mkDerivation rec {
      name = "hadoop-${version}-bin";

      src = fetchurl {
        url = "mirror://apache/hadoop/common/hadoop-${version}/hadoop-${version}-src.tar.gz";
        inherit sha256;
      };

      # perform fake build to make a fixed-output derivation of dependencies
      # downloaded from maven central (~100Mb in ~3000 files)
      fetched-maven-deps = stdenv.mkDerivation {
        name = "hadoop-${version}-maven-deps";
        inherit src nativeBuildInputs buildInputs configurePhase;
        # mvn is restarted as long as it exits with status 1 (the status seen
        # on wagon read timeouts, rto=5000ms) so the download can resume;
        # any other failure breaks the loop
        buildPhase = ''
          while mvn package -Dmaven.repo.local=$out/.m2 ${mavenFlags} -Dmaven.wagon.rto=5000; [ $? = 1 ]; do
            echo "timeout, restart maven to continue downloading"
          done
        '';
        # keep only *.{pom,jar,xml,sha1,so,dll,dylib} and delete all ephemeral files with lastModified timestamps inside
        installPhase = ''find $out/.m2 -type f -regex '.+\(\.lastUpdated\|resolver-status\.properties\|_remote\.repositories\)' -delete'';
        outputHashAlgo = "sha256";
        outputHashMode = "recursive";
        outputHash = dependencies-sha256;
      };

      nativeBuildInputs = [ maven cmake pkgconfig ];
      buildInputs = [ fuse snappy zlib bzip2 openssl protobuf2_5 ];

      # most of the hardcoded paths are fixed in 2.9.x and 3.0.0, this list of
      # patched files might be reduced when 2.7.x and 2.8.x will be deprecated
      postPatch = ''
        for file in hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java \
            hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java \
            hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java \
            hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java \
            hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java \
            hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java; do
          if [ -f "$file" ]; then
            substituteInPlace "$file" \
              --replace '/usr/bin/stat' 'stat' \
              --replace '/bin/bash' 'bash' \
              --replace '/bin/ls' 'ls' \
              --replace '/bin/mv' 'mv'
          fi
        done
      '';

      configurePhase = "true"; # do not trigger cmake hook

      mavenFlags = "-Drequire.snappy -Drequire.bzip2 -DskipTests -Pdist,native -e";

      # prevent downloading tomcat during the build
      preBuild = stdenv.lib.optionalString (tomcat != null) ''
        install -D ${tomcat.src} hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads/apache-tomcat-${tomcat.version}.tar.gz
        install -D ${tomcat.src} hadoop-common-project/hadoop-kms/downloads/apache-tomcat-${tomcat.version}.tar.gz
      '';

      buildPhase = ''
        # 'maven.repo.local' must be writable
        mvn package --offline -Dmaven.repo.local=$(cp -dpR ${fetched-maven-deps}/.m2 ./ && chmod +w -R .m2 && pwd)/.m2 ${mavenFlags}
        # remove runtime dependency on $jdk/jre/lib/amd64/server/libjvm.so
        patchelf --set-rpath ${stdenv.lib.makeLibraryPath [glibc]} hadoop-dist/target/hadoop-${version}/lib/native/libhadoop.so.1.0.0
        patchelf --set-rpath ${stdenv.lib.makeLibraryPath [glibc]} hadoop-dist/target/hadoop-${version}/lib/native/libhdfs.so.0.0.0
      '';

      installPhase = "mv hadoop-dist/target/hadoop-${version} $out";
    };
  in
    # Second derivation: repackage the compiled distribution with wrapped
    # launcher scripts so they find java, coreutils etc. at run time.
    stdenv.mkDerivation rec {
      name = "hadoop-${version}";
      src = binaryDistribution;
      nativeBuildInputs = [ makeWrapper ];

      installPhase = ''
        mkdir -p $out/share/doc/hadoop
        cp -dpR * $out/
        mv $out/*.txt $out/share/doc/hadoop/

        #
        # Do not use `wrapProgram` here, script renaming may result to weird things: http://i.imgur.com/0Xee013.png
        #
        mkdir -p $out/bin.wrapped
        for n in $out/bin/*; do
          if [ -f "$n" ]; then # only regular files
            mv $n $out/bin.wrapped/
            makeWrapper $out/bin.wrapped/$(basename $n) $n \
              --prefix PATH : "${stdenv.lib.makeBinPath [ which jre bash coreutils ]}" \
              --prefix JAVA_LIBRARY_PATH : "${stdenv.lib.makeLibraryPath [ openssl snappy zlib bzip2 ]}" \
              --set JAVA_HOME "${jre}" \
              --set HADOOP_PREFIX "$out"
          fi
        done
      '';

      meta = with stdenv.lib; {
        homepage = "http://hadoop.apache.org/";
        description = "Framework for distributed processing of large data sets across clusters of computers";
        license = licenses.asl20;

        longDescription = ''
          The Apache Hadoop software library is a framework that allows for
          the distributed processing of large data sets across clusters of
          computers using a simple programming model. It is designed to
          scale up from single servers to thousands of machines, each
          offering local computation and storage. Rather than rely on
          hardware to deliver high-availability, the library itself is
          designed to detect and handle failures at the application layer,
          so delivering a highly-available service on top of a cluster of
          computers, each of which may be prone to failures.
        '';

        maintainers = with maintainers; [ volth ];
        platforms = [ "x86_64-linux" ];
      };
    };
# Legacy tomcat source bundle, pre-seeded into the 2.x builds by `common`.
tomcat_6_0_48 =
  let
    version = "6.0.48";
  in {
    inherit version;
    src = fetchurl {
      # do not use "mirror://apache/" here, tomcat-6 is legacy and has been removed from the mirrors
      url = "https://archive.apache.org/dist/tomcat/tomcat-6/v${version}/bin/apache-tomcat-${version}.tar.gz";
      sha256 = "1w4jf28g8p25fmijixw6b02iqlagy2rvr57y3n90hvz341kb0bbc";
    };
  };
in {
# One attribute per supported release line, each produced by `common`.
# `dependencies-sha256` pins the fixed-output maven-dependency derivation and
# must be regenerated whenever `version`/`sha256` are bumped.
hadoop_2_7 = common {
version = "2.7.6";
sha256 = "0wmg0iy0qxrf43fzajzmx03gxp4yx197vxacqwkxaj45clqwl010";
dependencies-sha256 = "1lsr9nvrynzspxqcamb10d596zlnmnfpxhkd884gdiva0frm0b1r";
# pre-seeded so maven does not download tomcat (see preBuild in `common`)
tomcat = tomcat_6_0_48;
};
hadoop_2_8 = common {
version = "2.8.4";
sha256 = "16c3ljhrzibkjn3y1bmjxdgf0kn60l23ay5hqpp7vpbnqx52x68w";
dependencies-sha256 = "1j4f461487fydgr5978nnm245ksv4xbvskfr8pbmfhcyss6b7w03";
tomcat = tomcat_6_0_48;
};
hadoop_2_9 = common {
version = "2.9.1";
sha256 = "0qgmpfbpv7f521fkjy5ldzdb4lwiblhs0hyl8qy041ws17y5x7d7";
dependencies-sha256 = "1d5i8jj5y746rrqb9lscycnd7acmxlkz64ydsiyqsh5cdqgy2x7x";
tomcat = tomcat_6_0_48;
};
hadoop_3_0 = common {
version = "3.0.3";
sha256 = "1vvkci0kx4b48dg0niifn2d3r4wwq8pb3c5z20wy8pqsqrqhlci5";
dependencies-sha256 = "1kzkna9ywacm2m1cirj9cyip66bgqjhid2xf9rrhq6g10lhr8j9m";
# null skips the tomcat pre-seeding step; presumably 3.x no longer bundles
# tomcat for httpfs/KMS — TODO confirm against the 3.x build
tomcat = null;
};
hadoop_3_1 = common {
version = "3.1.0";
sha256 = "0lig25jkffkzc2bfgyrnm3wymapgyw9fkai8sk9fnmp7cljia314";
dependencies-sha256 = "1ri6a7lrijh538vy7v0fzgvkw603pf8jkh3ldl1kl7l0dvszd70d";
tomcat = null;
};
}