
Spark master test using mvn with hadoop 2.7
huangtianhua committed Aug 7, 2019
1 parent a133175 commit 7fbe200
Showing 6 changed files with 104 additions and 5 deletions.
14 changes: 14 additions & 0 deletions .zuul.yaml
@@ -0,0 +1,14 @@
- project:
    name: theopenlab/spark
    check:
      jobs:
        - spark-build-arm64

- job:
    name: spark-build-arm64
    parent: init-test
    description: |
      The Spark build in the OpenLab cluster.
    run: .zuul/playbooks/spark-build/run.yaml
    nodeset: ubuntu-xenial-arm64
    timeout: 86400
85 changes: 85 additions & 0 deletions .zuul/playbooks/spark-build/run.yaml
@@ -0,0 +1,85 @@
- hosts: all
  tasks:
    - name: Build spark master using mvn with hadoop 2.7
      shell:
        cmd: |
          set -ex
          sudo apt-get update -y
          # Install java
          sudo apt-get install default-jre -y
          sudo apt-get install default-jdk -y
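          # derive JAVA_HOME from the javac path registered with update-alternatives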
          java_home=$(dirname $(dirname $(update-alternatives --list javac)))
          echo "export JAVA_HOME=${java_home}" >> ~/.profile
          echo "export PATH=${java_home}/bin:\$PATH" >> ~/.profile
          source ~/.profile
          # Install maven
          wget http://www.us.apache.org/dist/maven/maven-3/3.6.1/binaries/apache-maven-3.6.1-bin.tar.gz
          tar -xvf apache-maven-3.6.1-bin.tar.gz
          export PATH=$PWD/apache-maven-3.6.1/bin:$PATH
          # Install vim
          sudo apt-get install vim -y

          # get snappy-1.0.5
          wget http://repository.timesys.com/buildsources/s/snappy/snappy-1.0.5/snappy-1.0.5.tar.gz
          tar -xvf snappy-1.0.5.tar.gz

          git clone https://github.com/huangtianhua/leveldbjni
          git clone https://github.com/huangtianhua/leveldb
          export SNAPPY_HOME=$(cd snappy-1.0.5; pwd)
          export LEVELDBJNI_HOME=$(cd leveldbjni; pwd)
          export LEVELDB_HOME=$(cd leveldb; pwd)

          cd ${SNAPPY_HOME}
          # get the latest config.guess and config.sub
          wget -O config.guess "https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD"
          wget -O config.sub "https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD"
          ./configure --disable-shared --with-pic
          make

          cd ${LEVELDB_HOME}
          export LIBRARY_PATH=${SNAPPY_HOME}
          export C_INCLUDE_PATH=${LIBRARY_PATH}
          export CPLUS_INCLUDE_PATH=${LIBRARY_PATH}
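          # apply the fork's aarch64 patch to leveldb before building the static library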
          git apply ../leveldbjni/leveldb_aarch64.patch
          make libleveldb.a

          # fix the problem: 'aclocal-1.14' is missing on your system
          cd ../
          sudo apt-get install autoconf -y
          wget http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz
          tar -zxvf automake-1.14.tar.gz
          cd automake-1.14
          ./bootstrap.sh
          ./configure
          sudo make
          sudo make install

          cd ${LEVELDBJNI_HOME}
          # fix error: configure.ac:36: error: required file 'autotools/compile' not found
          mkdir -p ${LEVELDBJNI_HOME}/leveldbjni-linux64-aarch64/target/native-build/autotools/
          cp /usr/local/share/automake-1.14/compile ${LEVELDBJNI_HOME}/leveldbjni-linux64-aarch64/target/native-build/autotools/
          # build leveldbjni-linux64-aarch64
          mvn install -P download -P linux64-aarch64
          # build leveldbjni-all
          mvn install -P download -P all
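          # leveldbjni-all bundles the per-platform native libraries into a
          # single jar, now including the aarch64 library built above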

          # use the local leveldbjni jar built above
          export SPARK_HOME='/home/zuul/src/github.com/theopenlab/spark/'
          cp leveldbjni-all/target/leveldbjni-all-99-master-SNAPSHOT.jar ${SPARK_HOME}

          # build and test spark
          cd ${SPARK_HOME}
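          # register the locally built jar under the coordinates Spark already
          # depends on (org.fusesource.leveldbjni:leveldbjni-all:1.8), so Maven
          # resolves it instead of the upstream x86-only artifact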
          mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni-all -Dversion=1.8 -Dpackaging=jar -Dfile=leveldbjni-all-99-master-SNAPSHOT.jar

          # fix error: java.net.UnknownHostException: ubuntu: ubuntu: Name or service not known
          sudo sed -i -e '/127.0.0.1/ s/\(localhost\)/'$(hostname)' \1/' /etc/hosts
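          # the 127.0.0.1 entry now reads "127.0.0.1 <hostname> localhost",
          # so the node's own hostname resolves locally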

          ./build/mvn -DskipTests -Phadoop-2.7 -Pyarn -Phive -Phive-thriftserver -Pkinesis-asl -Pmesos clean package
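          # --fail-at-end defers module failures so the remaining modules' tests still run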
          ./build/mvn -Phadoop-2.7 -Pyarn -Phive -Phive-thriftserver -Pkinesis-asl -Pmesos --fail-at-end test
        chdir: '/home/zuul/src'
        executable: /bin/bash
      environment: '{{ global_env }}'
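As a sanity check after the job finishes, one can confirm that the rebuilt jar was registered in the local Maven repository. A minimal sketch, assuming the default ~/.m2 layout and the install:install-file coordinates used above:

    ls ~/.m2/repository/org/fusesource/leveldbjni/leveldbjni-all/1.8/
    # expect leveldbjni-all-1.8.jar; with it in the local repository, the
    # Spark build resolves the aarch64-capable jar instead of the upstream one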
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -761,7 +761,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventually
     sc = new SparkContext(conf)

     // Ensure all executors have started
-    TestUtils.waitUntilExecutorsUp(sc, 1, 10000)
+    TestUtils.waitUntilExecutorsUp(sc, 1, 30000)
     assert(sc.resources.size === 1)
     assert(sc.resources.get(GPU).get.addresses === Array("5", "6"))
     assert(sc.resources.get(GPU).get.name === "gpu")
@@ -789,7 +789,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventually
     sc = new SparkContext(conf)

     // Ensure all executors have started
-    TestUtils.waitUntilExecutorsUp(sc, 1, 10000)
+    TestUtils.waitUntilExecutorsUp(sc, 1, 30000)
     // driver gpu resources file should take precedence over the script
     assert(sc.resources.size === 1)
     assert(sc.resources.get(GPU).get.addresses === Array("0", "1", "8"))
2 changes: 1 addition & 1 deletion core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -1353,7 +1353,7 @@ object SparkSubmitSuite extends SparkFunSuite with TimeLimits {
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))

     try {
-      val exitCode = failAfter(1.minute) { process.waitFor() }
+      val exitCode = failAfter(2.minutes) { process.waitFor() }
       if (exitCode != 0) {
         fail(s"Process returned with exit code $exitCode. See the log4j logs for more detail.")
       }
2 changes: 1 addition & 1 deletion sql/core/pom.xml
@@ -178,7 +178,7 @@
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest-maven-plugin</artifactId>
         <configuration>
-          <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize}</argLine>
+          <argLine>-ea -Xmx2g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize}</argLine>
         </configuration>
       </plugin>
       <plugin>
2 changes: 1 addition & 1 deletion sql/hive/pom.xml
@@ -252,7 +252,7 @@
         <artifactId>scalatest-maven-plugin</artifactId>
         <configuration>
           <!-- Specially disable assertions since some Hive tests fail them -->
-          <argLine>-da -Xmx4g -XX:ReservedCodeCacheSize=${CodeCacheSize}</argLine>
+          <argLine>-da -Xmx2g -XX:ReservedCodeCacheSize=${CodeCacheSize}</argLine>
         </configuration>
       </plugin>
       <plugin>
