From 67d6d98c8f1dd8e244143013babb93d242541f90 Mon Sep 17 00:00:00 2001
From: Trevor McKay
Date: Tue, 11 Sep 2018 17:09:25 -0400
Subject: [PATCH] Add more miscellaneous tests for incomplete image builds
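
Add echo statements to the assemble script so the build log shows why
an input is ignored (not a tar archive, no spark-submit, md5 mismatch)
and whether Spark is installed from a tarball or from an unpacked
directory. Teach poll_binary_build in test/common.sh to use
--from-archive for tgz inputs and --from-file for everything else.
Fold the old test/incomplete/bad_input.sh case into
test/incomplete/install_spark.sh along with new tests for installing
from a directory, inputs that lack spark-submit, and input that is not
a tar archive, and extend test/sparkinputs.sh to generate a fake
tarball with no spark-submit.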
fi if [ "$matched" -ne 0 ]; then - echo md5sum did not match + echo Ignoring tarball $spark, md5sum did not match continue fi fi # dname will be the intial directory from the path of spark-submit # we found in the tarball, ie the dir created by tar + echo Installing from tarball $spark dname=$(dirname $name | cut -d/ -f 1) sparkdir=$SPARK_INSTALL/$dname tar -xzf $spark -C $SPARK_INSTALL @@ -82,6 +87,7 @@ else # Search for the spark entrypoint file and copy it to $SPARK_INSTALL entry=$(find $sparkdir/kubernetes -name entrypoint.sh) if [ -n "$entry" ]; then + echo Copying spark entrypoint cp $entry $SPARK_INSTALL # We have to patch the entrypoint to toggle error checking @@ -110,6 +116,8 @@ else if [ "$?" -eq 0 ]; then echo Spark installed successfully exit 0 + else + echo Spark install failed fi # Just in case there is more than one tarball, clean up diff --git a/openshift-spark-build-inc/modules/s2i/added/assemble b/openshift-spark-build-inc/modules/s2i/added/assemble index 6bb759a..87402b8 100755 --- a/openshift-spark-build-inc/modules/s2i/added/assemble +++ b/openshift-spark-build-inc/modules/s2i/added/assemble @@ -35,20 +35,24 @@ else # Is the file a directory? If it contains spark-submit, move it if [ -d "$spark" ]; then if ! [ -f $spark/bin/spark-submit ]; then + echo Ignoring directory $spark, no spark-submit continue fi + echo Installing from directory $spark sparkdir=$SPARK_INSTALL/$(basename $spark) mv $spark $SPARK_INSTALL else # If we can get the table of contents, it's a tar archive, otherwise ignore tar -tf $spark &> /dev/null if [ "$?" -ne 0 ]; then + echo Ignoring $spark, not a tar archive continue fi # Does the tarball contain a spark-submit? name=$(tar -tzf $spark | grep "spark-submit$") if [ "$?" -ne 0 ]; then + echo Ignoring tarball $spark, no spark-submit continue else # See if we have an md5 file to match against @@ -63,13 +67,14 @@ else matched="$?" fi if [ "$matched" -ne 0 ]; then - echo md5sum did not match + echo Ignoring tarball $spark, md5sum did not match continue fi fi # dname will be the intial directory from the path of spark-submit # we found in the tarball, ie the dir created by tar + echo Installing from tarball $spark dname=$(dirname $name | cut -d/ -f 1) sparkdir=$SPARK_INSTALL/$dname tar -xzf $spark -C $SPARK_INSTALL @@ -82,6 +87,7 @@ else # Search for the spark entrypoint file and copy it to $SPARK_INSTALL entry=$(find $sparkdir/kubernetes -name entrypoint.sh) if [ -n "$entry" ]; then + echo Copying spark entrypoint cp $entry $SPARK_INSTALL # We have to patch the entrypoint to toggle error checking @@ -110,6 +116,8 @@ else if [ "$?" -eq 0 ]; then echo Spark installed successfully exit 0 + else + echo Spark install failed fi # Just in case there is more than one tarball, clean up diff --git a/test/common.sh b/test/common.sh index 4faeb15..65426b5 100644 --- a/test/common.sh +++ b/test/common.sh @@ -111,8 +111,17 @@ function poll_binary_build() { local name local source local expect_fail + local from_flag name=$1 source=$2 + # We'll pass a tarball directory to test from-archive and the ability + # of the image to detect an unpacked directory. 
+    # with a directory to test the ability of the image to handle a tarball
+    if [[ "$source" == *".tgz" ]]; then
+        from_flag="--from-archive=$source"
+    else
+        from_flag="--from-file=$source"
+    fi
    if [ "$#" -eq 3 ]; then
        expect_fail=$3
    else
@@ -122,7 +131,9 @@
    local status
    local BUILDNUM

-    oc start-build $name --from-file=$2
+    echo "oc start-build $name $from_flag"
+    oc start-build $name $from_flag
+
    while true; do
        BUILDNUM=$(oc get buildconfig $name --template='{{index .status "lastVersion"}}')

@@ -149,7 +160,7 @@
            if [ "$tries" -lt 5 ]; then
                echo Build failed on push, retrying
                sleep 5
-                oc start-build $name --from-file=$2
+                oc start-build $name $from_flag
                continue
            fi
        fi
diff --git a/test/incomplete/bad_input.sh b/test/incomplete/bad_input.sh
deleted file mode 100755
index 4229c23..0000000
--- a/test/incomplete/bad_input.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-THIS=$(readlink -f `dirname "${BASH_SOURCE[0]}"`)
-TOP_DIR=$(echo $THIS | grep -o '.*/openshift-spark')
-
-source $TOP_DIR/hack/lib/init.sh
-trap os::test::junit::reconcile_output EXIT
-
-source $TOP_DIR/test/common.sh
-RESOURCE_DIR=$TOP_DIR/test/resources
-
-os::test::junit::declare_suite_start "install_spark"
-
-# Handles registries, etc, and sets SPARK_IMAGE to the right value
-make_image
-make_configmap
-
-os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
-
-poll_binary_build spark "$THIS" true
-
-os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
-
-oc delete buildconfig spark
-
-cleanup_app
-
-os::test::junit::declare_suite_end
diff --git a/test/incomplete/install_spark.sh b/test/incomplete/install_spark.sh
index 01533a3..44708c3 100755
--- a/test/incomplete/install_spark.sh
+++ b/test/incomplete/install_spark.sh
@@ -15,6 +15,8 @@ function build_md5 {
     poll_binary_build spark "$RESOURCE_DIR"/spark-inputs

     os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Attempting to install Spark'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Installing from tarball'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Spark installed successfully'
     os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Push successful'

     if [ "$#" -ne 1 ] || [ "$1" != "true" ]; then
@@ -36,6 +38,45 @@ function build_bad_md5 {
     poll_binary_build spark "$RESOURCE_DIR"/spark-inputs true

     os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'md5sum did not match'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function build_from_directory {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$RESOURCE_DIR"/spark-inputs/*.tgz
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Attempting to install Spark'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Installing from directory'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Spark installed successfully'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Push successful'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function tarball_no_submit {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$RESOURCE_DIR"/spark-inputs-no-submit true
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Ignoring tarball.*no spark-submit'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function directory_no_submit {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$RESOURCE_DIR"/spark-inputs-no-submit/*.tgz true
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Ignoring directory.*no spark-submit'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
+    os::cmd::expect_success 'oc delete buildconfig spark'
+}
+
+function build_bad_tarball {
+    os::cmd::expect_success 'oc new-build --name=spark --docker-image="$SPARK_IMAGE" --binary'
+    poll_binary_build spark "$THIS" true
+
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'Ignoring.*not a tar archive'
+    os::cmd::expect_success_and_text 'oc log buildconfig/spark' 'no valid Spark distribution found'
     os::cmd::expect_success 'oc delete buildconfig spark'
 }

@@ -43,22 +84,32 @@ function build_bad_md5 {
 make_image
 make_configmap

-echo "++ build with md5"
-#build_md5
+echo "++ build_md5"
+build_md5

-echo "++ build without md5"
-echo $RESOURCE_DIR
-find $RESOURCE_DIR -name "*.md5"
+echo "++ build_md5 (md5 deleted)"
 md5=$(find $RESOURCE_DIR/spark-inputs -name "*.md5")
 rm $md5
 skip_app=true
 build_md5 $skip_app

-echo "++ build with bad md5"
+echo "++ build_bad_md5"
 mv $RESOURCE_DIR/spark-inputs/$(basename $md5 .md5).bad $md5
 build_bad_md5
 rm $md5

+echo "++ build_from_directory"
+build_from_directory
+
+echo "++ tarball_no_submit"
+tarball_no_submit
+
+echo "++ directory_no_submit"
+directory_no_submit
+
+echo "++ build_bad_tarball"
+build_bad_tarball
+
 cleanup_app

 os::test::junit::declare_suite_end
diff --git a/test/sparkinputs.sh b/test/sparkinputs.sh
index 91e7ef5..243ca6f 100755
--- a/test/sparkinputs.sh
+++ b/test/sparkinputs.sh
@@ -3,13 +3,24 @@
 TOP_DIR=$(readlink -f `dirname "${BASH_SOURCE[0]}"` | grep -o '.*/openshift-spark/')
 BUILD_DIR=$TOP_DIR/openshift-spark-build

+# See what spark version the image build used
 fullname=$(find $BUILD_DIR -name spark-[0-9.]*\.tgz)
+# Download the same version to use as a binary build input
 filename=$(basename $fullname)
 version=$(echo $filename | cut -d '-' -f2)

 mkdir -p $TOP_DIR/test/resources/spark-inputs
 pushd $TOP_DIR/test/resources/spark-inputs
-#wget https://archive.apache.org/dist/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz
+wget https://archive.apache.org/dist/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz
 wget https://archive.apache.org/dist/spark/spark-$version/spark-$version-bin-hadoop2.7.tgz.md5
 echo "spark-$version-bin-hadoop2.7.tgz: FF FF FF FF FF FF CA FE BE EF CA FE BE EF CA FE" > spark-$version-bin-hadoop2.7.tgz.bad
 popd
+
+# Make a fake tarball that is missing spark-submit
+mkdir -p $TOP_DIR/test/resources/spark-inputs-no-submit
+pushd $TOP_DIR/test/resources/spark-inputs-no-submit
+mkdir spark-$version-bin-hadoop2.7
+touch spark-$version-bin-hadoop2.7/foo
+tar -czf spark-$version-bin-hadoop2.7.tgz spark-$version-bin-hadoop2.7
+rm -rf spark-$version-bin-hadoop2.7
+popd