Commit

Add more miscellaneous tests for incomplete image builds
tmckayus committed Oct 1, 2018
1 parent c0ae82a commit 67d6d98
Showing 7 changed files with 109 additions and 39 deletions.
10 changes: 9 additions & 1 deletion modules/s2i/added/assemble
@@ -35,20 +35,24 @@ else
     # Is the file a directory? If it contains spark-submit, move it
     if [ -d "$spark" ]; then
         if ! [ -f $spark/bin/spark-submit ]; then
+            echo Ignoring directory $spark, no spark-submit
             continue
         fi
+        echo Installing from directory $spark
         sparkdir=$SPARK_INSTALL/$(basename $spark)
         mv $spark $SPARK_INSTALL
     else
         # If we can get the table of contents, it's a tar archive, otherwise ignore
         tar -tf $spark &> /dev/null
         if [ "$?" -ne 0 ]; then
+            echo Ignoring $spark, not a tar archive
             continue
         fi
 
         # Does the tarball contain a spark-submit?
         name=$(tar -tzf $spark | grep "spark-submit$")
         if [ "$?" -ne 0 ]; then
+            echo Ignoring tarball $spark, no spark-submit
             continue
         else
             # See if we have an md5 file to match against
@@ -63,13 +67,14 @@ else
                 matched="$?"
             fi
             if [ "$matched" -ne 0 ]; then
-                echo md5sum did not match
+                echo Ignoring tarball $spark, md5sum did not match
                 continue
             fi
         fi
 
         # dname will be the initial directory from the path of spark-submit
         # we found in the tarball, i.e. the dir created by tar
+        echo Installing from tarball $spark
         dname=$(dirname $name | cut -d/ -f 1)
         sparkdir=$SPARK_INSTALL/$dname
         tar -xzf $spark -C $SPARK_INSTALL
@@ -82,6 +87,7 @@ else
     # Search for the spark entrypoint file and copy it to $SPARK_INSTALL
     entry=$(find $sparkdir/kubernetes -name entrypoint.sh)
     if [ -n "$entry" ]; then
+        echo Copying spark entrypoint
         cp $entry $SPARK_INSTALL
 
         # We have to patch the entrypoint to toggle error checking
@@ -110,6 +116,8 @@ else
 if [ "$?" -eq 0 ]; then
     echo Spark installed successfully
     exit 0
+else
+    echo Spark install failed
 fi
 
 # Just in case there is more than one tarball, clean up
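
Note: the added echo lines above give each rejection path a distinct log message. A minimal standalone sketch of the same two checks (the filename spark-dist.tgz is hypothetical, not part of this commit):

    # tar -tf succeeds only if the file is a readable tar archive
    if tar -tf spark-dist.tgz &> /dev/null; then
        # a usable distribution must list a spark-submit entry in its contents
        if tar -tzf spark-dist.tgz | grep -q "spark-submit$"; then
            echo "looks like a valid Spark distribution"
        fi
    fi
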
10 changes: 9 additions & 1 deletion openshift-spark-build-inc-py36/modules/s2i/added/assemble
@@ -35,20 +35,24 @@ else
     # Is the file a directory? If it contains spark-submit, move it
     if [ -d "$spark" ]; then
         if ! [ -f $spark/bin/spark-submit ]; then
+            echo Ignoring directory $spark, no spark-submit
             continue
         fi
+        echo Installing from directory $spark
         sparkdir=$SPARK_INSTALL/$(basename $spark)
         mv $spark $SPARK_INSTALL
     else
         # If we can get the table of contents, it's a tar archive, otherwise ignore
         tar -tf $spark &> /dev/null
         if [ "$?" -ne 0 ]; then
+            echo Ignoring $spark, not a tar archive
             continue
         fi
 
         # Does the tarball contain a spark-submit?
         name=$(tar -tzf $spark | grep "spark-submit$")
         if [ "$?" -ne 0 ]; then
+            echo Ignoring tarball $spark, no spark-submit
             continue
         else
             # See if we have an md5 file to match against
@@ -63,13 +67,14 @@ else
                 matched="$?"
             fi
             if [ "$matched" -ne 0 ]; then
-                echo md5sum did not match
+                echo Ignoring tarball $spark, md5sum did not match
                 continue
             fi
         fi
 
         # dname will be the initial directory from the path of spark-submit
         # we found in the tarball, i.e. the dir created by tar
+        echo Installing from tarball $spark
         dname=$(dirname $name | cut -d/ -f 1)
         sparkdir=$SPARK_INSTALL/$dname
         tar -xzf $spark -C $SPARK_INSTALL
@@ -82,6 +87,7 @@ else
     # Search for the spark entrypoint file and copy it to $SPARK_INSTALL
     entry=$(find $sparkdir/kubernetes -name entrypoint.sh)
     if [ -n "$entry" ]; then
+        echo Copying spark entrypoint
         cp $entry $SPARK_INSTALL
 
         # We have to patch the entrypoint to toggle error checking
@@ -110,6 +116,8 @@ else
 if [ "$?" -eq 0 ]; then
     echo Spark installed successfully
     exit 0
+else
+    echo Spark install failed
 fi
 
 # Just in case there is more than one tarball, clean up
10 changes: 9 additions & 1 deletion openshift-spark-build-inc/modules/s2i/added/assemble
@@ -35,20 +35,24 @@ else
     # Is the file a directory? If it contains spark-submit, move it
     if [ -d "$spark" ]; then
         if ! [ -f $spark/bin/spark-submit ]; then
+            echo Ignoring directory $spark, no spark-submit
             continue
         fi
+        echo Installing from directory $spark
         sparkdir=$SPARK_INSTALL/$(basename $spark)
         mv $spark $SPARK_INSTALL
     else
         # If we can get the table of contents, it's a tar archive, otherwise ignore
         tar -tf $spark &> /dev/null
         if [ "$?" -ne 0 ]; then
+            echo Ignoring $spark, not a tar archive
             continue
         fi
 
         # Does the tarball contain a spark-submit?
         name=$(tar -tzf $spark | grep "spark-submit$")
         if [ "$?" -ne 0 ]; then
+            echo Ignoring tarball $spark, no spark-submit
             continue
         else
             # See if we have an md5 file to match against
@@ -63,13 +67,14 @@ else
                 matched="$?"
             fi
             if [ "$matched" -ne 0 ]; then
-                echo md5sum did not match
+                echo Ignoring tarball $spark, md5sum did not match
                 continue
             fi
         fi
 
         # dname will be the initial directory from the path of spark-submit
         # we found in the tarball, i.e. the dir created by tar
+        echo Installing from tarball $spark
         dname=$(dirname $name | cut -d/ -f 1)
         sparkdir=$SPARK_INSTALL/$dname
         tar -xzf $spark -C $SPARK_INSTALL
@@ -82,6 +87,7 @@ else
     # Search for the spark entrypoint file and copy it to $SPARK_INSTALL
     entry=$(find $sparkdir/kubernetes -name entrypoint.sh)
     if [ -n "$entry" ]; then
+        echo Copying spark entrypoint
         cp $entry $SPARK_INSTALL
 
         # We have to patch the entrypoint to toggle error checking
@@ -110,6 +116,8 @@ else
 if [ "$?" -eq 0 ]; then
     echo Spark installed successfully
     exit 0
+else
+    echo Spark install failed
 fi
 
 # Just in case there is more than one tarball, clean up
15 changes: 13 additions & 2 deletions test/common.sh
@@ -111,8 +111,17 @@ function poll_binary_build() {
     local name
     local source
     local expect_fail
+    local from_flag
     name=$1
     source=$2
+    # We'll pass a tarball with from-archive to test the ability of the
+    # image to detect an unpacked directory, and pass a directory with
+    # from-file to test the ability of the image to handle a tarball
+    if [[ "$source" == *".tgz" ]]; then
+        from_flag="--from-archive=$source"
+    else
+        from_flag="--from-file=$source"
+    fi
     if [ "$#" -eq 3 ]; then
         expect_fail=$3
     else
@@ -122,7 +131,9 @@ function poll_binary_build() {
     local status
     local BUILDNUM
 
-    oc start-build $name --from-file=$2
+    echo "oc start-build $name $from_flag"
+    oc start-build $name $from_flag
+
 
     while true; do
         BUILDNUM=$(oc get buildconfig $name --template='{{index .status "lastVersion"}}')
@@ -149,7 +160,7 @@ function poll_binary_build() {
             if [ "$tries" -lt 5 ]; then
                 echo Build failed on push, retrying
                 sleep 5
-                oc start-build $name --from-file=$2
+                oc start-build $name $from_flag
                 continue
             fi
         fi
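
Note: with this change the helper picks the start-build flag from the source type, as used by the tests below. Roughly, the two invocations look like this (paths hypothetical); oc extracts a --from-archive tarball before the build, while --from-file hands the input over as-is:

    oc start-build spark --from-archive=test/resources/spark-inputs/spark-2.3.0-bin-hadoop2.7.tgz
    oc start-build spark --from-file=test/resources/spark-inputs
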
27 changes: 0 additions & 27 deletions test/incomplete/bad_input.sh

This file was deleted.

63 changes: 57 additions & 6 deletions test/incomplete/install_spark.sh
@@ -15,6 +15,8 @@ function build_md5 {
     poll_binary_build spark "$RESOURCE_DIR"/spark-inputs
 
     os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Attempting to install Spark'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Installing from tarball'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Spark installed successfully'
     os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Push successful'
 
     if [ "$#" -ne 1 ] || [ "$1" != "true" ]; then
@@ -36,29 +38,78 @@ function build_bad_md5 {
     poll_binary_build spark "$RESOURCE_DIR"/spark-inputs true
 
     os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'md5sum did not match'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
     os::cmd::expect_success 'oc delete buildconfig spark'
 }
 
+function build_from_directory {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$RESOURCE_DIR"/spark-inputs/*.tgz
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Attempting to install Spark'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Installing from directory'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Spark installed successfully'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Push successful'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function tarball_no_submit {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$RESOURCE_DIR"/spark-inputs-no-submit true
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Ignoring tarball.*no spark-submit'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function directory_no_submit {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$RESOURCE_DIR"/spark-inputs-no-submit/*.tgz true
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Ignoring directory.*no spark-submit'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function build_bad_tarball {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$THIS" true
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Ignoring.*not a tar archive'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
 # Handles registries, etc, and sets SPARK_IMAGE to the right value
 make_image
 make_configmap
 
-echo "++ build with md5"
-#build_md5
+echo "++ build_md5"
+build_md5
 
-echo "++ build without md5"
-echo $RESOURCE_DIR
-find $RESOURCE_DIR -name "*.md5"
+echo "++ build_md5 (md5 deleted)"
 md5=$(find $RESOURCE_DIR/spark-inputs -name "*.md5")
 rm $md5
 skip_app=true
 build_md5 $skip_app
 
-echo "++ build with bad md5"
+echo "++ build_bad_md5"
 mv $RESOURCE_DIR/spark-inputs/$(basename $md5 .md5).bad $md5
 build_bad_md5
 rm $md5
 
+echo "++ build_from_directory"
+build_from_directory
+
+echo "++ tarball_no_submit"
+tarball_no_submit
+
+echo "++ directory_no_submit"
+directory_no_submit
+
+echo "++ build_bad_tarball"
+build_bad_tarball
+
 cleanup_app
 
 os::test::junit::declare_suite_end
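
Note: build_bad_tarball feeds the test script itself ($THIS) to the build, but any non-tar file exercises the same rejection path. A hypothetical manual reproduction:

    echo "not a tarball" > /tmp/bogus.txt
    oc start-build spark --from-file=/tmp/bogus.txt
    # the build log should then show "Ignoring ... not a tar archive"
    # and "no valid Spark distribution found", matching the assertions above
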
13 changes: 12 additions & 1 deletion test/sparkinputs.sh
@@ -3,13 +3,24 @@
 TOP_DIR=$(readlink -f `dirname "${BASH_SOURCE[0]}"` | grep -o '.*/openshift-spark/')
 BUILD_DIR=$TOP_DIR/openshift-spark-build
 
+# See what spark version the image build used
 fullname=$(find $BUILD_DIR -name spark-[0-9.]*\.tgz)
 
+# Download the same version to use as a binary build input
 filename=$(basename $fullname)
 version=$(echo $filename | cut -d '-' -f2)
 mkdir -p $TOP_DIR/test/resources/spark-inputs
 pushd $TOP_DIR/test/resources/spark-inputs
-#wget https://archive.apache.org/dist/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz
+wget https://archive.apache.org/dist/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz
 wget https://archive.apache.org/dist/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz.md5
+echo "spark-$version-bin-hadoop2.7.tgz: FF FF FF FF FF FF CA FE BE EF CA FE BE EF CA FE" > spark-$version-bin-hadoop2.7.tgz.bad
 popd
 
+# Make a fake tarball that is missing spark-submit
+mkdir -p $TOP_DIR/test/resources/spark-inputs-no-submit
+pushd $TOP_DIR/test/resources/spark-inputs-no-submit
+mkdir spark-$version-bin-hadoop2.7
+touch spark-$version-bin-hadoop2.7/foo
+tar -czf spark-$version-bin-hadoop2.7.tgz spark-$version-bin-hadoop2.7
+rm -rf spark-$version-bin-hadoop2.7
+popd
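
Note: the generated .bad file follows the spaced-hex "name: FF FF ..." layout of the Apache archive .md5 files, which plain md5sum -c does not accept directly. A conversion sketch (an assumption about the md5 handling, not code from this commit):

    # rewrite "name: FF FF ..." as md5sum's "hash  name" and verify
    md5file=spark-$version-bin-hadoop2.7.tgz.md5
    sum=$(cut -d: -f2 "$md5file" | tr -d ' \n' | tr '[:upper:]' '[:lower:]')
    echo "$sum  spark-$version-bin-hadoop2.7.tgz" | md5sum -c -
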
