Skip to content


Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

BIGTOP-21. Add support for Mageia

git-svn-id: 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information...
commit c115b05ee7a058d961337cf7685e998203431e6c 1 parent e92b03d
@bmahe bmahe authored
38 src/pkg/rpm/hadoop/SPECS/hadoop.spec
@@ -40,7 +40,7 @@
-%if %{!?suse_version:1}0
+%if 0%{?fedora_version}%{?rhel_version}%{?centos_version}
# brp-repack-jars uses unzip to expand jar files
# Unfortunately aspectjtools-1.6.5.jar pulled by ivy contains some files and directories without any read permission
# and make whole process to fail.
@@ -54,10 +54,11 @@
%define alternatives_cmd alternatives
%global initd_dir %{_sysconfdir}/rc.d/init.d
+%if %{?suse_version:1}0
# Only tested on openSUSE 11.4. Let's update it for previous releases when confirmed
%if 0%{suse_version} > 1130
@@ -71,9 +72,12 @@
%define alternatives_cmd update-alternatives
%global initd_dir %{_sysconfdir}/rc.d
+%if 0%{?mgaversion}
+%define alternatives_cmd update-alternatives
+%global initd_dir %{_sysconfdir}/rc.d/init.d
@@ -100,8 +104,8 @@ Source3: hadoop-init.tmpl.suse
Source4: hadoop.1
Source5: hadoop-fuse-dfs.1
Buildroot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
-BuildRequires: lzo-devel, python >= 2.4, git, fuse-devel,fuse, automake, autoconf
-Requires: sh-utils, textutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service
+BuildRequires: python >= 2.4, git, fuse-devel,fuse, automake, autoconf
+Requires: textutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service
Provides: hadoop
# RHEL6 provides natively java
@@ -114,15 +118,23 @@ Requires: jre >= 1.6
%if %{?suse_version:1}0
-BuildRequires: libfuse2, libopenssl-devel, gcc-c++, ant, ant-nodeps, ant-trax
+BuildRequires: libfuse2, libopenssl-devel, gcc-c++, ant, ant-nodeps, ant-trax, liblzo-devel
# Required for init scripts
-Requires: insserv
-BuildRequires: fuse-libs, libtool, redhat-rpm-config
+Requires: sh-utils, insserv
+%if 0%{?fedora_version}%{?rhel_version}%{?centos_version}
+BuildRequires: fuse-libs, libtool, redhat-rpm-config, liblzo-devel
# Required for init scripts
-Requires: redhat-lsb
+Requires: sh-utils, redhat-lsb
+%if 0%{?mgaversion}
+BuildRequires: libfuse-devel, libfuse2 , libopenssl-devel, gcc-c++, ant, libtool, automake, autoconf, liblzo-devel, libzlib-devel
+Requires: chkconfig, xinetd-simple-services, libzlib
Hadoop is a software platform that lets one easily write and
run applications that process vast amounts of data.
@@ -382,7 +394,7 @@ getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HDFS" --she
--slave %{log_hadoop_dirname}/%{hadoop_name} %{hadoop_name}-log %{log_hadoop} \
--slave %{lib_hadoop_dirname}/%{hadoop_name} %{hadoop_name}-lib %{lib_hadoop} \
--slave /etc/%{hadoop_name} %{hadoop_name}-etc %{etc_hadoop} \
- --slave %{man_hadoop}/man1/%{hadoop_name}.1.gz %{hadoop_name}-man %{man_hadoop}/man1/%{name}.1.gz
+ --slave %{man_hadoop}/man1/%{hadoop_name}.1.*z %{hadoop_name}-man %{man_hadoop}/man1/%{name}.1.*z
@@ -402,7 +414,7 @@ fi
%attr(0775,root,hadoop) /var/run/%{name}
%attr(0775,root,hadoop) %{log_hadoop}
2  src/pkg/rpm/hive/SPECS/hive.spec
@@ -200,7 +200,7 @@ fi
%attr(1777,root,root) %{var_lib_hive}/metastore
%doc %{doc_hive}
%define service_macro() \
%files %1 \
2  src/pkg/rpm/oozie/SPECS/oozie.spec
@@ -161,5 +161,5 @@ fi
%docdir %{_docdir}
2  src/pkg/rpm/zookeeper/SPECS/zookeeper.spec
@@ -168,4 +168,4 @@ fi
%doc %{doc_zookeeper}
2  src/site/xdoc/index.xml
@@ -29,7 +29,7 @@ xsi:schemaLocation="
developed by a community with a focus on the system as a whole, rather than individual projects.</p>
<subsection name="Building Bigtop"></subsection>
- <p>Packages have been built on Ubuntu 10.10, CentOS 5 and openSUSE 11.4.
+ <p>Packages have been built on Ubuntu 10.10, CentOS 5, Mageia 1 and openSUSE 11.4.
They can probably be built on other platforms as well.<br/><br/>
Building Bigtop requires the following tools:</p>
Please sign in to comment.
Something went wrong with that request. Please try again.