Skip to content

Commit

Permalink
Merge 3bec609 into 5fd551d
Browse files Browse the repository at this point in the history
  • Loading branch information
umayrh committed Jan 28, 2019
2 parents 5fd551d + 3bec609 commit 434385e
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 9 deletions.
8 changes: 5 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,19 +5,21 @@ sudo: required
dist: trusty

env:
- PYENV_REHASH_TIMEOUT=120
global:
- GLOBAL_ENV_FILE="${HOME}/global_env.sh"
- PYENV_REHASH_TIMEOUT=120

addons:
apt:
packages:
- axel

before_install:
- pip install --user -r test_requirements.txt
- ./scripts/travis-setup.sh

install:
- ./gradlew clean coverage build
- source ${GLOBAL_ENV_FILE}
- ./gradlew --console=plain clean coverage build

after_success:
- coveralls
Expand Down
28 changes: 22 additions & 6 deletions scripts/travis-setup.sh
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
#!/bin/bash

# This setup script sets up a container-like environment for installing packages

set -ex

OS=$(uname -s)

## Service versions
# TODO: this should really come from sparkScala/gradle.properties
SPARK_VERSION=${SPARK_VERSION:-"2.4.0"}
HADOOP_VERSION=${HADOOP_VERSION:-"2.7"}

## Check OS type
## OS-specific package installation
bootstrap() {
if [[ "Linux" == "${OS}" ]]; then
bootstrapLinux
Expand All @@ -22,7 +19,16 @@ bootstrap() {
}

## Linux bootstrap sequence for the Travis build.
## Order matters: the global env file must exist before setupSpark
## appends 'export' lines to it.
bootstrapLinux() {
createGlobalEnvFile
setupSpark
installPipRequirements
}

## Create (truncating any previous copy) a Bash script that later steps
## append 'export' commands to; it is sourced by a separate process in
## the .travis.yml 'install' phase.
## Globals:  GLOBAL_ENV_FILE (read) - path of the env file to create
createGlobalEnvFile() {
  # Group the writes so the file is opened once; quote the path so it
  # survives spaces/globs (SC2086).
  {
    echo "#!/bin/bash"
    echo "set -e"
  } > "${GLOBAL_ENV_FILE}"
}

## Installs a specific version of Spark
Expand All @@ -32,19 +38,29 @@ setupSpark() {
if [[ ! -d "$HOME/.cache/${SPARK_DIST_NAME}" ]]; then
cd $HOME/.cache
rm -fr ./${SPARK_DIST_NAME}.tgz*
# Use axel again whe https://github.com/axel-download-accelerator/axel/issues/133
# Use axel again when https://github.com/axel-download-accelerator/axel/issues/192
# has been fixed.
# axel --quiet http://www-us.apache.org/dist/spark/${SPARK_DIR_NAME}/${SPARK_DIST_NAME}.tgz
wget --quiet http://www-us.apache.org/dist/spark/${SPARK_DIR_NAME}/${SPARK_DIST_NAME}.tgz
ls -alh ${SPARK_DIST_NAME}.tgz
tar -xf ./${SPARK_DIST_NAME}.tgz
# TODO: need a more systematic method for setting up Spark properties
cd ..
fi
export SPARK_HOME="${HOME}/.cache/${SPARK_DIST_NAME}"
# Write env variables to a file that can be sourced later by a different
# process. This seems better than hard-coding all the variables in
# an "env: global" block in .travis.yml
echo "export SPARK_HOME=\"${HOME}/.cache/${SPARK_DIST_NAME}\"" >> ${GLOBAL_ENV_FILE}
# TODO: need a more systematic method for setting up Spark properties
echo "spark.yarn.jars=${SPARK_HOME}/jars/*.jar" > ${SPARK_HOME}/conf/spark-defaults.conf
}

## Installs CI-specific Python packages listed in test_requirements.txt.
## Globals:  TRAVIS_BUILD_DIR (read) - checkout directory of the repo
## Returns:  non-zero if the build dir is missing or pip fails
installPipRequirements() {
  # Fail fast if the build dir is unreachable instead of running pip
  # from whatever the current directory happens to be; quote the path
  # so it survives spaces (SC2086).
  cd "${TRAVIS_BUILD_DIR}" || return 1
  pip install --user -r test_requirements.txt
}

# Retry a given command
retry() {
if "$@"; then
Expand Down

0 comments on commit 434385e

Please sign in to comment.