From 2d7854a4a0e7488b71a43f4693056f19b080388e Mon Sep 17 00:00:00 2001
From: Alexey Volkov
Date: Fri, 30 Nov 2018 13:16:45 -0800
Subject: [PATCH 01/13] SDK/GCP - Replaced default_gcp_op with task.apply(use_gcp_secret)

---
 sdk/python/kfp/dsl/__init__.py        |  1 -
 sdk/python/kfp/dsl/_container_op.py   | 15 ++++++
 sdk/python/kfp/dsl/_default_gcp_op.py | 69 ---------------------------
 sdk/python/kfp/gcp.py                 | 58 ++++++++++++++++++++++
 4 files changed, 73 insertions(+), 70 deletions(-)
 delete mode 100644 sdk/python/kfp/dsl/_default_gcp_op.py
 create mode 100644 sdk/python/kfp/gcp.py

diff --git a/sdk/python/kfp/dsl/__init__.py b/sdk/python/kfp/dsl/__init__.py
index 5e389b2fbfe3..ac44ae5e1c66 100644
--- a/sdk/python/kfp/dsl/__init__.py
+++ b/sdk/python/kfp/dsl/__init__.py
@@ -18,4 +18,3 @@
 from ._container_op import ContainerOp
 from ._ops_group import OpsGroup, ExitHandler, Condition
 from ._component import python_component
-from ._default_gcp_op import default_gcp_op
diff --git a/sdk/python/kfp/dsl/_container_op.py b/sdk/python/kfp/dsl/_container_op.py
index 763693cbc735..42523155f9ae 100644
--- a/sdk/python/kfp/dsl/_container_op.py
+++ b/sdk/python/kfp/dsl/_container_op.py
@@ -88,6 +88,21 @@ def __init__(self, name: str, image: str, command: str=None, arguments: str=None
     if len(self.outputs) == 1:
       self.output = list(self.outputs.values())[0]
 
+  def apply(self, mod_func):
+    """Applies a modifier function to self. The function should return the passed object.
+    This is needed to chain "extension methods" to this class.
+
+    Example:
+      from kfp.gcp import use_gcp_secret
+      task = (
+        train_op(...)
+          .set_memory_request('1GB')
+          .apply(use_gcp_secret('user-gcp-sa'))
+          .set_memory_limit('2GB')
+      )
+    """
+    return mod_func(self)
+
   def after(self, op):
     """Specify explicit dependency on another op."""
     self.dependent_op_names.append(op.name)
diff --git a/sdk/python/kfp/dsl/_default_gcp_op.py b/sdk/python/kfp/dsl/_default_gcp_op.py
deleted file mode 100644
index b88d59887dad..000000000000
--- a/sdk/python/kfp/dsl/_default_gcp_op.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from kfp import dsl
-from typing import Dict
-
-
-def default_gcp_op(name: str, image: str, command: str = None,
-                   arguments: str = None, file_inputs: Dict[dsl.PipelineParam, str] = None,
-                   file_outputs: Dict[str, str] = None, is_exit_handler=False):
-  """An operator that mounts the default GCP service account to the container.
-
-  The user-gcp-sa secret is created as part of the kubeflow deployment that
-  stores the access token for kubeflow user service account.
-
-  With this service account, the container has a range of GCP APIs to
-  access to. This service account is automatically created as part of the
-  kubeflow deployment.
-
-  For the list of the GCP APIs this service account can access to, check
-  https://github.com/kubeflow/kubeflow/blob/7b0db0d92d65c0746ac52b000cbc290dac7c62b1/deployment/gke/deployment_manager_configs/iam_bindings_template.yaml#L18
-
-  If you want to call the GCP APIs in a different project, grant the kf-user
-  service account access permission.
-  """
-  from kubernetes import client as k8s_client
-
-  return (
-    dsl.ContainerOp(
-      name,
-      image,
-      command,
-      arguments,
-      file_inputs,
-      file_outputs,
-      is_exit_handler,
-    )
-    .add_volume(
-      k8s_client.V1Volume(
-        name='gcp-credentials',
-        secret=k8s_client.V1SecretVolumeSource(
-          secret_name='user-gcp-sa'
-        )
-      )
-    )
-    .add_volume_mount(
-      k8s_client.V1VolumeMount(
-        mount_path='/secret/gcp-credentials',
-        name='gcp-credentials',
-      )
-    )
-    .add_env_variable(
-      k8s_client.V1EnvVar(
-        name='GOOGLE_APPLICATION_CREDENTIALS',
-        value='/secret/gcp-credentials/user-gcp-sa.json'
-      )
-    )
-  )
diff --git a/sdk/python/kfp/gcp.py b/sdk/python/kfp/gcp.py
new file mode 100644
index 000000000000..e271d26e8d75
--- /dev/null
+++ b/sdk/python/kfp/gcp.py
@@ -0,0 +1,58 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def use_gcp_secret(secret_name='user-gcp-sa', secret_file_path_in_volume='/user-gcp-sa.json', volume_name='gcp-credentials', secret_volume_mount_path='/secret/gcp-credentials'):
+    """An operator that configures the container to use a GCP service account.
+
+    The user-gcp-sa secret is created as part of the kubeflow deployment and
+    stores the access token for the kubeflow user service account.
+
+    With this service account, the container has access to a range of GCP
+    APIs. The service account is created automatically as part of the
+    kubeflow deployment.
+
+    For the list of GCP APIs this service account can access, see
+    https://github.com/kubeflow/kubeflow/blob/7b0db0d92d65c0746ac52b000cbc290dac7c62b1/deployment/gke/deployment_manager_configs/iam_bindings_template.yaml#L18
+
+    To call GCP APIs in a different project, grant the kf-user service
+    account access permission in that project.
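+
+    Example:
+      # A minimal sketch. `train_op` is a placeholder task factory (any
+      # function that returns a dsl.ContainerOp works the same way); it is
+      # not part of this module.
+      task = train_op(...).apply(use_gcp_secret('user-gcp-sa'))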
+ """ + + def _use_gcp_secret(task): + from kubernetes import client as k8s_client + return ( + task + .add_volume( + k8s_client.V1Volume( + name=volume_name, + secret=k8s_client.V1SecretVolumeSource( + secret_name=secret_name, + ) + ) + ) + .add_volume_mount( + k8s_client.V1VolumeMount( + name=volume_name, + mount_path=secret_volume_mount_path, + ) + ) + .add_env_variable( + k8s_client.V1EnvVar( + name='GOOGLE_APPLICATION_CREDENTIALS', + value=secret_volume_mount_path + secret_file_path_in_volume, + ) + ) + ) + + return _use_gcp_secret From f4808d6b2540c92b5a67885fb53dead852841024 Mon Sep 17 00:00:00 2001 From: IronPan Date: Mon, 3 Dec 2018 17:00:15 -0800 Subject: [PATCH 02/13] add gcp permission to all samples --- .../kubeflow-training-classification.py | 9 +++++---- samples/tfx/taxi-cab-classification-pipeline.py | 11 ++++++----- samples/xgboost-spark/xgboost-training-cm.py | 15 ++++++++------- 3 files changed, 19 insertions(+), 16 deletions(-) diff --git a/samples/kubeflow-tf/kubeflow-training-classification.py b/samples/kubeflow-tf/kubeflow-training-classification.py index 43911e52a95f..c01a1ddcb73b 100755 --- a/samples/kubeflow-tf/kubeflow-training-classification.py +++ b/samples/kubeflow-tf/kubeflow-training-classification.py @@ -15,6 +15,7 @@ import kfp.dsl as dsl +import kfp.gcp as gcp import datetime def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'): @@ -31,7 +32,7 @@ def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', sc '--output', transform_output, ], file_outputs = {'transformed': '/output.txt'} - ) + ).apply(gcp.use_gcp_secret('user-gcp-sa')) def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training'): @@ -49,7 +50,7 @@ def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', l '--job-dir', training_output, ], file_outputs = {'train': '/output.txt'} - ) + ).apply(gcp.use_gcp_secret('user-gcp-sa')) def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'): return dsl.ContainerOp( @@ -65,7 +66,7 @@ def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json] '--output', prediction_output, ], file_outputs = {'prediction': '/output.txt'} - ) + ).apply(gcp.use_gcp_secret('user-gcp-sa')) def confusion_matrix_op(predictions, output, step_name='confusionmatrix'): return dsl.ContainerOp( @@ -75,7 +76,7 @@ def confusion_matrix_op(predictions, output, step_name='confusionmatrix'): '--predictions', predictions, '--output', output, ] - ) + ).apply(gcp.use_gcp_secret('user-gcp-sa')) @dsl.pipeline( name='Pipeline TFJob', diff --git a/samples/tfx/taxi-cab-classification-pipeline.py b/samples/tfx/taxi-cab-classification-pipeline.py index 3c219b688d16..abfaf67f39bf 100755 --- a/samples/tfx/taxi-cab-classification-pipeline.py +++ b/samples/tfx/taxi-cab-classification-pipeline.py @@ -15,6 +15,7 @@ import kfp.dsl as dsl +import kfp.gcp as gcp import datetime def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'GcsUri', column_names: 'GcsUri[text/json]', 
 def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'GcsUri', column_names: 'GcsUri[text/json]', key_columns, project: 'GcpProject', mode, validation_output: 'GcsUri[Directory]', step_name='validation'):
@@ -34,7 +35,7 @@ def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'G
             'output': '/output.txt',
             'schema': '/output_schema.json',
         }
-    )
+    ).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
 def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'):
     return dsl.ContainerOp(
@@ -50,7 +51,7 @@ def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', sc
             '--output', transform_output,
         ],
         file_outputs = {'transformed': '/output.txt'}
-    )
+    ).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
 
 def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target: str, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training'):
@@ -68,7 +69,7 @@ def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate
             '--job-dir', training_output,
         ],
         file_outputs = {'train': '/output.txt'}
-    )
+    ).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
 def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', analyze_mode, analyze_slice_column, analysis_output: 'GcsUri', step_name='analysis'):
     return dsl.ContainerOp(
@@ -84,7 +85,7 @@ def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'Gc
             '--output', analysis_output,
         ],
         file_outputs = {'analysis': '/output.txt'}
-    )
+    ).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
 
 def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'):
@@ -101,7 +102,7 @@ def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]
             '--output', prediction_output,
         ],
         file_outputs = {'prediction': '/output.txt'}
-    )
+    ).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
 def kubeflow_deploy_op(model: 'TensorFlow model', tf_server_name, step_name='deploy'):
     return dsl.ContainerOp(
diff --git a/samples/xgboost-spark/xgboost-training-cm.py b/samples/xgboost-spark/xgboost-training-cm.py
index 8a1ed3ba448b..6d22b28d5a4b 100755
--- a/samples/xgboost-spark/xgboost-training-cm.py
+++ b/samples/xgboost-spark/xgboost-training-cm.py
@@ -15,6 +15,7 @@
 
 
 import kfp.dsl as dsl
+import kfp.gcp as gcp
 
 
 # ================================================================
@@ -179,26 +180,26 @@ def xgb_train_pipeline(
 ):
     delete_cluster_op = DeleteClusterOp('delete-cluster', project, region)
     with dsl.ExitHandler(exit_op=delete_cluster_op):
-        create_cluster_op = CreateClusterOp('create-cluster', project, region, output)
+        create_cluster_op = CreateClusterOp('create-cluster', project, region, output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
         analyze_op = AnalyzeOp('analyze', project, region, create_cluster_op.output, schema,
-            train_data, '%s/{{workflow.name}}/analysis' % output)
+            train_data, '%s/{{workflow.name}}/analysis' % output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
         transform_op = TransformOp('transform', project, region, create_cluster_op.output,
             train_data, eval_data, target, analyze_op.output,
-            '%s/{{workflow.name}}/transform' % output)
+            '%s/{{workflow.name}}/transform' % output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
         train_op = TrainerOp('train', project, region, create_cluster_op.output,
             transform_op.outputs['train'], transform_op.outputs['eval'], target, analyze_op.output, workers,
-            rounds, '%s/{{workflow.name}}/model' % output)
+            rounds, '%s/{{workflow.name}}/model' % output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
         predict_op = PredictOp('predict', project, region, create_cluster_op.output, transform_op.outputs['eval'],
-            train_op.output, target, analyze_op.output, '%s/{{workflow.name}}/predict' % output)
+            train_op.output, target, analyze_op.output, '%s/{{workflow.name}}/predict' % output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
         confusion_matrix_op = ConfusionMatrixOp('confusion-matrix', predict_op.output,
-            '%s/{{workflow.name}}/confusionmatrix' % output)
+            '%s/{{workflow.name}}/confusionmatrix' % output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
-        roc_op = RocOp('roc', predict_op.output, true_label, '%s/{{workflow.name}}/roc' % output)
+        roc_op = RocOp('roc', predict_op.output, true_label, '%s/{{workflow.name}}/roc' % output).apply(gcp.use_gcp_secret('user-gcp-sa'))
 
 if __name__ == '__main__':
     import kfp.compiler as compiler

From e56438c0d882912baaa0e6d86f0e390856715c53 Mon Sep 17 00:00:00 2001
From: IronPan
Date: Mon, 3 Dec 2018 21:34:31 -0800
Subject: [PATCH 03/13] remove GKE dependency for tf serving

---
 components/kubeflow/deployer/src/deploy.sh | 27 +++++-----------------
 1 file changed, 6 insertions(+), 21 deletions(-)

diff --git a/components/kubeflow/deployer/src/deploy.sh b/components/kubeflow/deployer/src/deploy.sh
index f13c8e26e40c..8a310c740528 100755
--- a/components/kubeflow/deployer/src/deploy.sh
+++ b/components/kubeflow/deployer/src/deploy.sh
@@ -26,16 +26,6 @@ while (($#)); do
       MODEL_PATH="$1"
       shift
       ;;
-    "--project")
-      shift
-      PROJECT="$1"
-      shift
-      ;;
-    "--zone")
-      shift
-      ZONE="$1"
-      shift
-      ;;
     "--cluster-name")
       shift
       CLUSTER_NAME="$1"
      shift
      ;;
@@ -65,27 +55,22 @@ fi
 
 echo "Deploying the model '${MODEL_PATH}'"
 
-if [ -z "${PROJECT}" ]; then
-  PROJECT=$(wget -q -O- --header="Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/project/project-id)
-fi
-
 if [ -z "${CLUSTER_NAME}" ]; then
   CLUSTER_NAME=$(wget -q -O- --header="Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/attributes/cluster-name)
 fi
-
-if [ -z "${ZONE}" ]; then
-  ZONE=$(wget -q -O- --header="Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/zone | cut -d '/' -f 4)
-fi
 
 # Ensure the server name is not more than 63 characters.
 SERVER_NAME="${SERVER_NAME:0:63}"
 # Trim any trailing hyphens from the server name.
 while [[ "${SERVER_NAME:(-1)}" == "-" ]]; do SERVER_NAME="${SERVER_NAME::-1}"; done
 
-echo "Deploying ${SERVER_NAME} to the cluster ${CLUSTER_NAME} in the project ${PROJECT} and the zone ${ZONE}..."
+echo "Deploying ${SERVER_NAME} to the cluster ${CLUSTER_NAME}"
 
-# Connect kubectl to the cluster
-gcloud --project "${PROJECT}" container clusters get-credentials "${CLUSTER_NAME}" --zone "${ZONE}"
+# Connect kubectl to the local cluster
+kubectl config set-cluster "${CLUSTER_NAME}" --server=https://kubernetes.default --certificate-authority=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt
+kubectl config set-credentials pipeline --token "$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)"
+kubectl config set-context kubeflow --cluster "${CLUSTER_NAME}" --user pipeline
+kubectl config use-context kubeflow
 
 # Configure and deploy the TF serving app
 cd /src/github.com/kubeflow/kubeflow

From ccc73c2645976239b38e9d568fe3f8e1d814b299 Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 00:41:01 -0800
Subject: [PATCH 04/13] add permission

---
 pipeline/pipeline/pipeline-apiserver.libsonnet | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pipeline/pipeline/pipeline-apiserver.libsonnet b/pipeline/pipeline/pipeline-apiserver.libsonnet
index d562b990ed5b..bb0224a26306 100644
--- a/pipeline/pipeline/pipeline-apiserver.libsonnet
+++ b/pipeline/pipeline/pipeline-apiserver.libsonnet
@@ -255,6 +255,7 @@
           resources: [
             "pods",
             "pods/exec",
+            "pods/log",
             "services",
           ],
          verbs: [

From cbf29003e9f8a83bd8e13a702ef438046de2a789 Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 02:03:36 -0800
Subject: [PATCH 05/13] debug

---
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index 18de0240afab..c8d1b0fc1cb8 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-trap clean_up EXIT
+# trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject

From 3d2738206ee5527e440d05ed458bbeddf3b5b7d9 Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 11:46:39 -0800
Subject: [PATCH 06/13] fix

---
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index c8d1b0fc1cb8..18de0240afab 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-# trap clean_up EXIT
+trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject

From be50c60d6af563093e6267c61e019ab0e7f59446 Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 14:08:24 -0800
Subject: [PATCH 07/13] fix

---
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index 18de0240afab..c8d1b0fc1cb8 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-trap clean_up EXIT
+# trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject
From d06c312bcc04a9c4d663b217acdea52770453c1b Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 16:13:50 -0800
Subject: [PATCH 08/13] fix test

---
 components/dataproc/xgboost/common/_utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/components/dataproc/xgboost/common/_utils.py b/components/dataproc/xgboost/common/_utils.py
index 0c4a5c136dba..919a57fdd33d 100644
--- a/components/dataproc/xgboost/common/_utils.py
+++ b/components/dataproc/xgboost/common/_utils.py
@@ -134,6 +134,7 @@ def copy_resources_to_gcs(file_paths, gcs_path):
   dest_files = []
   for file_name in file_paths:
     dest_file = os.path.join(gcs_path, tmpdir, os.path.basename(file_name))
+    subprocess.call(['gcloud', 'auth', 'activate-service-account', '--key-file=${GOOGLE_APPLICATION_CREDENTIALS}'])
     subprocess.call(['gsutil', 'cp', file_name, dest_file])
     dest_files.append(dest_file)
 

From 69ed2cdf74965489a7033721b3dd2bbfd67efc1b Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 16:13:58 -0800
Subject: [PATCH 09/13] fix test

---
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index c8d1b0fc1cb8..18de0240afab 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-# trap clean_up EXIT
+trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject

From c61009053f71002d597954c4d6e6a4bb3ba30dec Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 17:18:50 -0800
Subject: [PATCH 10/13] fix credential

---
 components/dataproc/xgboost/common/_utils.py     | 2 +-
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/components/dataproc/xgboost/common/_utils.py b/components/dataproc/xgboost/common/_utils.py
index 919a57fdd33d..84b403018749 100644
--- a/components/dataproc/xgboost/common/_utils.py
+++ b/components/dataproc/xgboost/common/_utils.py
@@ -134,7 +134,7 @@ def copy_resources_to_gcs(file_paths, gcs_path):
   dest_files = []
   for file_name in file_paths:
     dest_file = os.path.join(gcs_path, tmpdir, os.path.basename(file_name))
-    subprocess.call(['gcloud', 'auth', 'activate-service-account', '--key-file=${GOOGLE_APPLICATION_CREDENTIALS}'])
+    subprocess.call(['gcloud', 'auth', 'activate-service-account', '--key-file="${GOOGLE_APPLICATION_CREDENTIALS}"'])
     subprocess.call(['gsutil', 'cp', file_name, dest_file])
     dest_files.append(dest_file)
 
diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index 18de0240afab..c8d1b0fc1cb8 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-trap clean_up EXIT
+# trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject

From f688b9c81a2155ee364eab6238fa1d00b449a22d Mon Sep 17 00:00:00 2001
From: IronPan
Date: Tue, 4 Dec 2018 17:25:45 -0800
Subject: [PATCH 11/13] Update presubmit-tests-with-pipeline-deployment.sh

---
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index c8d1b0fc1cb8..18de0240afab 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-# trap clean_up EXIT
+trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject

From db2772108f5779475c91bf3eb1f870a1cd4da1df Mon Sep 17 00:00:00 2001
From: IronPan
Date: Wed, 5 Dec 2018 01:57:58 -0800
Subject: [PATCH 12/13] Update _utils.py

---
 components/dataproc/xgboost/common/_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/components/dataproc/xgboost/common/_utils.py b/components/dataproc/xgboost/common/_utils.py
index 84b403018749..110afb2e1cb2 100644
--- a/components/dataproc/xgboost/common/_utils.py
+++ b/components/dataproc/xgboost/common/_utils.py
@@ -134,7 +134,7 @@ def copy_resources_to_gcs(file_paths, gcs_path):
   dest_files = []
   for file_name in file_paths:
     dest_file = os.path.join(gcs_path, tmpdir, os.path.basename(file_name))
-    subprocess.call(['gcloud', 'auth', 'activate-service-account', '--key-file="${GOOGLE_APPLICATION_CREDENTIALS}"'])
+    subprocess.call(['gcloud', 'auth', 'activate-service-account', '--key-file', os.environ['GOOGLE_APPLICATION_CREDENTIALS']])
     subprocess.call(['gsutil', 'cp', file_name, dest_file])
     dest_files.append(dest_file)
 

From 2a21822b1615480091e4ea908a2689dca75b708a Mon Sep 17 00:00:00 2001
From: IronPan
Date: Wed, 5 Dec 2018 03:12:16 -0800
Subject: [PATCH 13/13] fix credential

---
 test/presubmit-tests-with-pipeline-deployment.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/presubmit-tests-with-pipeline-deployment.sh b/test/presubmit-tests-with-pipeline-deployment.sh
index 18de0240afab..c8d1b0fc1cb8 100755
--- a/test/presubmit-tests-with-pipeline-deployment.sh
+++ b/test/presubmit-tests-with-pipeline-deployment.sh
@@ -115,7 +115,7 @@ function clean_up {
   cd ${KFAPP}
   ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
 }
-trap clean_up EXIT
+# trap clean_up EXIT
 
 ${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform gcp --project ${PROJECT} --skipInitProject
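
Taken together, these patches replace dsl.default_gcp_op with a modifier that
can be applied to any dsl.ContainerOp. A minimal end-to-end sketch of the
resulting usage follows; the image, command, and compile output path are
illustrative placeholders, not values taken from these patches:

    import kfp.dsl as dsl
    import kfp.gcp as gcp

    @dsl.pipeline(
        name='GCP secret example',
        description='Runs one step with the user-gcp-sa secret mounted.'
    )
    def gcp_secret_pipeline():
        # Placeholder image and command; any containerized step works.
        train = dsl.ContainerOp(
            name='train',
            image='gcr.io/my-project/my-trainer:dev',
            command=['python', '-m', 'trainer.task'],
        ).apply(gcp.use_gcp_secret('user-gcp-sa'))
        # Inside the container, GOOGLE_APPLICATION_CREDENTIALS points at the
        # mounted key file. GCP client libraries use it automatically, while
        # gcloud/gsutil need 'gcloud auth activate-service-account' first,
        # which is what the _utils.py change in patches 08-12 adds.

    if __name__ == '__main__':
        import kfp.compiler as compiler
        compiler.Compiler().compile(gcp_secret_pipeline, __file__ + '.tar.gz')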