Skip to content

Commit

Permalink
fix PYTHONPATH and PATH
Browse files Browse the repository at this point in the history
  • Loading branch information
Qi77Qi committed Jan 8, 2022
1 parent 6b6aaa6 commit e601f31
Show file tree
Hide file tree
Showing 7 changed files with 27 additions and 4 deletions.
Expand Up @@ -31,6 +31,10 @@ services:
PIP_USER: "false"
PIP_TARGET: "${NOTEBOOKS_DIR}/packages"
R_LIBS: "${NOTEBOOKS_DIR}/packages"
# The next two lines are a workaround: they set PYTHONPATH and PATH for images at or older than us.gcr.io/broad-dsp-gcr-public/terra-jupyter-base:1.0.2.
# Remove them once we no longer support those older images. In the meantime, take care when updating the Jupyter base image.
PYTHONPATH: "/etc/jupyter/custom:/usr/lib/spark/python:${NOTEBOOKS_DIR}/packages"
PATH: "/opt/conda/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:${NOTEBOOKS_DIR}/.local/bin:${NOTEBOOKS_DIR}/packages/bin"
env_file:
- /var/google_application_credentials.env
- /var/custom_env_vars.env
Expand Down
Expand Up @@ -45,6 +45,10 @@ services:
WELDER_ENABLED: "${WELDER_ENABLED}"
NOTEBOOKS_DIR: "${NOTEBOOKS_DIR}"
MEM_LIMIT: "${MEM_LIMIT}"
# The next two lines are a workaround: they set PYTHONPATH and PATH for images at or older than us.gcr.io/broad-dsp-gcr-public/terra-jupyter-base:1.0.2.
# Remove them once we no longer support those older images.
PYTHONPATH: "/etc/jupyter/custom:/usr/lib/spark/python:${NOTEBOOKS_DIR}/packages"
PATH: "/opt/conda/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:${NOTEBOOKS_DIR}/.local/bin:${NOTEBOOKS_DIR}/packages/bin"
env_file:
- /var/google_application_credentials.env
- /var/custom_env_vars.env
Expand Down
6 changes: 5 additions & 1 deletion http/src/main/resources/init-resources/startup.sh
Expand Up @@ -68,6 +68,7 @@ export START_USER_SCRIPT_URI=$(startUserScriptUri)
export START_USER_SCRIPT_OUTPUT_URI=$(startUserScriptOutputUri)
export WELDER_MEM_LIMIT=$(welderMemLimit)
export MEM_LIMIT=$(memLimit)
export INIT_BUCKET_NAME=$(initBucketName)
export USE_GCE_STARTUP_SCRIPT=$(useGceStartupScript)
GPU_ENABLED=$(gpuEnabled)
export IS_RSTUDIO_RUNTIME="false" # TODO: update to commented out code once we release Rmd file syncing
Expand Down Expand Up @@ -107,6 +108,8 @@ then
if [ ! -z "$JUPYTER_DOCKER_IMAGE" ] ; then
echo "Restarting Jupyter Container $GOOGLE_PROJECT / $CLUSTER_NAME..."

$GSUTIL_CMD cp gs://${INIT_BUCKET_NAME}/`basename ${JUPYTER_DOCKER_COMPOSE}` $JUPYTER_DOCKER_COMPOSE

tee /var/variables.env << END
JUPYTER_SERVER_NAME=${JUPYTER_SERVER_NAME}
JUPYTER_DOCKER_IMAGE=${JUPYTER_DOCKER_IMAGE}
Expand Down Expand Up @@ -134,6 +137,7 @@ else

if [ ! -z "$JUPYTER_DOCKER_IMAGE" ] ; then
echo "Restarting Jupyter Container $GOOGLE_PROJECT / $CLUSTER_NAME..."
$GSUTIL_CMD cp gs://${INIT_BUCKET_NAME}/`basename ${JUPYTER_DOCKER_COMPOSE}` $JUPYTER_DOCKER_COMPOSE

${DOCKER_COMPOSE} -f ${JUPYTER_DOCKER_COMPOSE} stop
${DOCKER_COMPOSE} -f ${JUPYTER_DOCKER_COMPOSE} rm -f
Expand Down Expand Up @@ -256,7 +260,7 @@ if [ ! -z "$JUPYTER_DOCKER_IMAGE" ] ; then
# kernel tries to connect to it.
docker exec $JUPYTER_SERVER_NAME /bin/bash -c "R -e '1+1'" || true

docker exec -d $JUPYTER_SERVER_NAME /bin/bash -c "export WELDER_ENABLED=$WELDER_ENABLED && export NOTEBOOKS_DIR=$NOTEBOOKS_DIR && export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}${NOTEBOOKS_DIR}/packages && export PATH=${PATH:+$PATH:}${NOTEBOOKS_DIR}/packages/bin && (/etc/jupyter/scripts/run-jupyter.sh $NOTEBOOKS_DIR || /opt/conda/bin/jupyter notebook)"
docker exec -d $JUPYTER_SERVER_NAME /bin/bash -c "export WELDER_ENABLED=$WELDER_ENABLED && export NOTEBOOKS_DIR=$NOTEBOOKS_DIR && (/etc/jupyter/scripts/run-jupyter.sh $NOTEBOOKS_DIR || /opt/conda/bin/jupyter notebook)"

if [ "$WELDER_ENABLED" == "true" ] ; then
# fix for https://broadworkbench.atlassian.net/browse/IA-1453
Expand Down
Expand Up @@ -22,7 +22,8 @@ import scala.util.Try

abstract private[util] class BaseRuntimeInterpreter[F[_]](
config: RuntimeInterpreterConfig,
welderDao: WelderDAO[F]
welderDao: WelderDAO[F],
bucketHelper: BucketHelper[F]
)(implicit F: Async[F],
dbRef: DbReference[F],
metrics: OpenTelemetryMetrics[F],
Expand Down Expand Up @@ -83,6 +84,9 @@ abstract private[util] class BaseRuntimeInterpreter[F[_]](
}
.map(_.getOrElse(params.runtimeAndRuntimeConfig.runtime))

// Re-upload the Jupyter docker-compose file to the init bucket so that the restarted runtime picks up the updated Jupyter environment variables
_ <- bucketHelper.uploadFileToInitBucket(params.initBucket, config.clusterResourcesConfig.jupyterDockerCompose)

startGoogleRuntimeReq = StartGoogleRuntime(params.runtimeAndRuntimeConfig.copy(runtime = updatedRuntime),
params.initBucket,
welderAction)
Expand Down
Expand Up @@ -90,6 +90,13 @@ class BucketHelper[F[_]](
.drain
} yield ()

/**
 * Uploads a single runtime resource to the given init bucket.
 *
 * The resource's contents are read as a stream and piped directly into a GCS
 * streaming upload; the blob name is the resource's own name
 * (`runtimeResource.asString`), so re-uploading overwrites the existing object.
 *
 * @param initBucketName  destination GCS bucket for runtime init resources
 * @param runtimeResource the bundled resource to upload
 * @return an effect that completes once the upload stream has been drained
 */
def uploadFileToInitBucket(initBucketName: GcsBucketName, runtimeResource: RuntimeResource): F[Unit] = {
  val destinationBlob = GcsBlobName(runtimeResource.asString)
  val uploadSink = google2StorageDAO.streamUploadBlob(initBucketName, destinationBlob)
  TemplateHelper
    .resourceStream[F](runtimeResource)
    .through(uploadSink)
    .compile
    .drain
}

def initializeBucketObjects(
initBucketName: GcsBucketName,
serviceAccountKey: Option[ServiceAccountKey],
Expand Down
Expand Up @@ -105,7 +105,7 @@ class DataprocInterpreter[F[_]: Parallel](
metrics: OpenTelemetryMetrics[F],
logger: StructuredLogger[F],
dbRef: DbReference[F])
extends BaseRuntimeInterpreter[F](config, welderDao)
extends BaseRuntimeInterpreter[F](config, welderDao, bucketHelper)
with RuntimeAlgebra[F]
with LazyLogging {

Expand Down
Expand Up @@ -52,7 +52,7 @@ class GceInterpreter[F[_]](
dbRef: DbReference[F],
F: Async[F],
logger: StructuredLogger[F])
extends BaseRuntimeInterpreter[F](config, welderDao)
extends BaseRuntimeInterpreter[F](config, welderDao, bucketHelper)
with RuntimeAlgebra[F] {
override def createRuntime(
params: CreateRuntimeParams
Expand Down

0 comments on commit e601f31

Please sign in to comment.