Add python36-inc module override and fix Makefile.inc
tmckayus committed Oct 1, 2018
1 parent 67d6d98 commit 259f58f
Showing 11 changed files with 30 additions and 181 deletions.
5 changes: 3 additions & 2 deletions Makefile.inc
@@ -40,7 +40,7 @@ $(DOCKERFILE_CONTEXT)/Dockerfile $(DOCKERFILE_CONTEXT)/modules:

$(DOCKERFILE_CONTEXT)-py36/Dockerfile $(DOCKERFILE_CONTEXT)-py36/modules:
-mkdir -p $(DOCKERFILE_CONTEXT)-py36
-	concreate generate --descriptor image-inc.yaml --overrides overrides/python36.yaml --target target-py36
+	concreate generate --descriptor image-inc.yaml --overrides overrides/python36-inc.yaml --target target-py36
cp -R target-py36/image/* $(DOCKERFILE_CONTEXT)-py36

zero-tarballs:
@@ -59,5 +59,6 @@ test-e2e-py:
SPARK_TEST_IMAGE=$(SPARK_TEST_IMAGE) test/run.sh incomplete/

test-e2e-py36:
-	LOCAL_IMAGE=$(SPARK_TEST_IMAGE)-py36 make -f Makefile.inc build-py36
+	# build already adds the -py36 to local image
+	LOCAL_IMAGE=$(SPARK_TEST_IMAGE) make -f Makefile.inc build-py36
SPARK_TEST_IMAGE=$(SPARK_TEST_IMAGE)-py36 test/run.sh incomplete/
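
For reference, a typical invocation of the updated target might look like the sketch below; the image name is only a placeholder, and it assumes SPARK_TEST_IMAGE is supplied by the caller while the build target appends the -py36 suffix itself, as the comment in the diff notes.

    # Build the py36 image and run the e2e suite against it
    # (the image name here is an example, not taken from this commit)
    SPARK_TEST_IMAGE=openshift-spark make -f Makefile.inc test-e2e-py36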
3 changes: 0 additions & 3 deletions openshift-spark-build-inc-py36/Dockerfile
@@ -59,9 +59,6 @@ RUN [ "bash", "-x", "/tmp/scripts/common/install" ]
USER root
RUN [ "bash", "-x", "/tmp/scripts/metrics/install" ]

-USER root
-RUN [ "bash", "-x", "/tmp/scripts/spark/install" ]
-
USER root
RUN [ "bash", "-x", "/tmp/scripts/s2i/install" ]

14 changes: 7 additions & 7 deletions openshift-spark-build-inc-py36/modules/s2i/added/assemble
@@ -35,24 +35,24 @@ else
    # Is the file a directory? If it contains spark-submit, move it
    if [ -d "$spark" ]; then
        if ! [ -f $spark/bin/spark-submit ]; then
            echo Ignoring directory $spark, no spark-submit
            continue
        fi
        echo Installing from directory $spark
        sparkdir=$SPARK_INSTALL/$(basename $spark)
        mv $spark $SPARK_INSTALL
    else
        # If we can get the table of contents, it's a tar archive, otherwise ignore
        tar -tf $spark &> /dev/null
        if [ "$?" -ne 0 ]; then
            echo Ignoring $spark, not a tar archive
            continue
        fi

        # Does the tarball contain a spark-submit?
        name=$(tar -tzf $spark | grep "spark-submit$")
        if [ "$?" -ne 0 ]; then
            echo Ignoring tarball $spark, no spark-submit
            continue
        else
            # See if we have an md5 file to match against
@@ -74,7 +74,7 @@ else

            # dname will be the initial directory from the path of spark-submit
            # we found in the tarball, i.e. the dir created by tar
            echo Installing from tarball $spark
            dname=$(dirname $name | cut -d/ -f 1)
            sparkdir=$SPARK_INSTALL/$dname
            tar -xzf $spark -C $SPARK_INSTALL
@@ -87,7 +87,7 @@
            # Search for the spark entrypoint file and copy it to $SPARK_INSTALL
            entry=$(find $sparkdir/kubernetes -name entrypoint.sh)
            if [ -n "$entry" ]; then
                echo Copying spark entrypoint
                cp $entry $SPARK_INSTALL

                # We have to patch the entrypoint to toggle error checking
@@ -117,7 +117,7 @@ else
    echo Spark installed successfully
    exit 0
else
    echo Spark install failed
fi

# Just in case there is more than one tarball, clean up
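
Because assemble runs at S2I build time, a hypothetical invocation that exercises the tarball path above could look like the following; the directory, image, and tag names are placeholders and not part of this commit.

    # Feed a Spark distribution tarball to the incomplete image via s2i;
    # the assemble script validates the archive and unpacks it under $SPARK_INSTALL
    s2i build ./spark-artifacts openshift-spark-inc my-complete-spark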

This file was deleted.

This file was deleted.

52 changes: 0 additions & 52 deletions openshift-spark-build-inc-py36/modules/spark/install

This file was deleted.

5 changes: 0 additions & 5 deletions openshift-spark-build-inc-py36/modules/spark/module.yaml

This file was deleted.

Empty file.
14 changes: 7 additions & 7 deletions openshift-spark-build-inc/modules/s2i/added/assemble
@@ -35,24 +35,24 @@ else
    # Is the file a directory? If it contains spark-submit, move it
    if [ -d "$spark" ]; then
        if ! [ -f $spark/bin/spark-submit ]; then
            echo Ignoring directory $spark, no spark-submit
            continue
        fi
        echo Installing from directory $spark
        sparkdir=$SPARK_INSTALL/$(basename $spark)
        mv $spark $SPARK_INSTALL
    else
        # If we can get the table of contents, it's a tar archive, otherwise ignore
        tar -tf $spark &> /dev/null
        if [ "$?" -ne 0 ]; then
            echo Ignoring $spark, not a tar archive
            continue
        fi

        # Does the tarball contain a spark-submit?
        name=$(tar -tzf $spark | grep "spark-submit$")
        if [ "$?" -ne 0 ]; then
            echo Ignoring tarball $spark, no spark-submit
            continue
        else
            # See if we have an md5 file to match against
@@ -74,7 +74,7 @@ else

            # dname will be the initial directory from the path of spark-submit
            # we found in the tarball, i.e. the dir created by tar
            echo Installing from tarball $spark
            dname=$(dirname $name | cut -d/ -f 1)
            sparkdir=$SPARK_INSTALL/$dname
            tar -xzf $spark -C $SPARK_INSTALL
@@ -87,7 +87,7 @@
            # Search for the spark entrypoint file and copy it to $SPARK_INSTALL
            entry=$(find $sparkdir/kubernetes -name entrypoint.sh)
            if [ -n "$entry" ]; then
                echo Copying spark entrypoint
                cp $entry $SPARK_INSTALL

                # We have to patch the entrypoint to toggle error checking
@@ -117,7 +117,7 @@ else
    echo Spark installed successfully
    exit 0
else
    echo Spark install failed
fi

# Just in case there is more than one tarball, clean up
Empty file.
13 changes: 13 additions & 0 deletions overrides/python36-inc.yaml
@@ -0,0 +1,13 @@
# Python 3.6 override
# for more information see https://concreate.readthedocs.io/en/develop/overrides.html
schema_version: 1

envs:
  - name: SCL_ENABLE_CMD
    value: "scl enable rh-python36"
modules:
  install:
    - name: python36
    - name: common
    - name: metrics
    - name: s2i
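
As a quick check, the override can be exercised with the same concreate call the Makefile uses; the second command is only a sketch of how SCL_ENABLE_CMD might be consumed inside the image, assuming a shell expands the variable (this commit does not show the consumer).

    # Regenerate the py36 build context with the new override (same call as Makefile.inc)
    concreate generate --descriptor image-inc.yaml --overrides overrides/python36-inc.yaml --target target-py36

    # Hypothetical use of SCL_ENABLE_CMD: run a command with rh-python36 enabled
    $SCL_ENABLE_CMD "python --version"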
