Release 0517114 (#1859)
* Updated component images to version 0517114

* Updated components to version 48dd338
neuromage authored and k8s-ci-robot committed Aug 16, 2019
1 parent 7dbca1a, commit 0d898cb
Showing 58 changed files with 88 additions and 88 deletions.
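For pipeline authors, the practical effect of this release is that component definitions are loaded from the 48dd338 commit pin, which in turn references the 0517114 container images. Below is a minimal sketch following the pattern in the updated READMEs and notebooks; the BigQuery query component is used only as a representative example, and any other component touched by this commit follows the same pattern with a different path segment.

```python
import kfp.components as comp

# Load a component definition pinned to the commit this release points at
# (48dd338...). Only the path segment changes for other components.
bigquery_query_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/'
    '48dd338c8ab328084633c51704cda77db79ac8c2'
    '/components/gcp/bigquery/query/component.yaml')

# Inspect the component's documented inputs and outputs, as the updated samples do.
help(bigquery_query_op)
```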
2 changes: 1 addition & 1 deletion components/dataflow/predict/component.yaml
@@ -15,7 +15,7 @@ outputs:
- {name: Predictions dir, type: GCSPath, description: 'GCS or local directory.'} #Will contain prediction_results-* and schema.json files; TODO: Split outputs and replace dir with single file # type: {GCSPath: {path_type: Directory}}
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0517114dc2b365a4a6d95424af6157ead774eff3
command: [python2, /ml/predict.py]
args: [
--data, {inputValue: Data file pattern},
2 changes: 1 addition & 1 deletion components/dataflow/tfdv/component.yaml
@@ -18,7 +18,7 @@ outputs:
- {name: Validation result, type: String, description: Indicates whether anomalies were detected or not.}
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:0517114dc2b365a4a6d95424af6157ead774eff3
command: [python2, /ml/validate.py]
args: [
--csv-data-for-inference, {inputValue: Inference data},
2 changes: 1 addition & 1 deletion components/dataflow/tfma/component.yaml
@@ -17,7 +17,7 @@ outputs:
- {name: Analysis results dir, type: GCSPath, description: GCS or local directory where the analysis results should be written.} # type: {GCSPath: {path_type: Directory}}
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0517114dc2b365a4a6d95424af6157ead774eff3
command: [python2, /ml/model_analysis.py]
args: [
--model, {inputValue: Model},
2 changes: 1 addition & 1 deletion components/dataflow/tft/component.yaml
@@ -12,7 +12,7 @@ outputs:
- {name: Transformed data dir, type: GCSPath} # type: {GCSPath: {path_type: Directory}}
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0517114dc2b365a4a6d95424af6157ead774eff3
command: [python2, /ml/transform.py]
args: [
--train, {inputValue: Training data file pattern},
2 changes: 1 addition & 1 deletion components/gcp/bigquery/query/README.md
@@ -89,7 +89,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

bigquery_query_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/bigquery/query/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/bigquery/query/component.yaml')
help(bigquery_query_op)
```

2 changes: 1 addition & 1 deletion components/gcp/bigquery/query/component.yaml
@@ -57,7 +57,7 @@ outputs:
type: GCSPath
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.bigquery, query,
--query, {inputValue: query},
2 changes: 1 addition & 1 deletion components/gcp/bigquery/query/sample.ipynb
@@ -108,7 +108,7 @@
"import kfp.components as comp\n",
"\n",
"bigquery_query_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/bigquery/query/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/bigquery/query/component.yaml')\n",
"help(bigquery_query_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataflow/launch_python/README.md
@@ -77,7 +77,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataflow_python_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataflow/launch_python/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_python/component.yaml')
help(dataflow_python_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataflow/launch_python/component.yaml
@@ -51,7 +51,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataflow, launch_python,
--python_file_path, {inputValue: python_file_path},
2 changes: 1 addition & 1 deletion components/gcp/dataflow/launch_python/sample.ipynb
@@ -95,7 +95,7 @@
"import kfp.components as comp\n",
"\n",
"dataflow_python_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataflow/launch_python/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_python/component.yaml')\n",
"help(dataflow_python_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataflow/launch_template/README.md
@@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataflow_template_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataflow/launch_template/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_template/component.yaml')
help(dataflow_template_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataflow/launch_template/component.yaml
@@ -61,7 +61,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataflow, launch_template,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataflow/launch_template/sample.ipynb
@@ -85,7 +85,7 @@
"import kfp.components as comp\n",
"\n",
"dataflow_template_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataflow/launch_template/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_template/component.yaml')\n",
"help(dataflow_template_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/create_cluster/README.md
@@ -74,7 +74,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_create_cluster_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/create_cluster/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/create_cluster/component.yaml')
help(dataproc_create_cluster_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/create_cluster/component.yaml
@@ -68,7 +68,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, create_cluster,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/create_cluster/sample.ipynb
@@ -92,7 +92,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_create_cluster_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/create_cluster/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/create_cluster/component.yaml')\n",
"help(dataproc_create_cluster_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/delete_cluster/README.md
@@ -56,7 +56,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_delete_cluster_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/delete_cluster/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/delete_cluster/component.yaml')
help(dataproc_delete_cluster_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/delete_cluster/component.yaml
@@ -36,7 +36,7 @@ inputs:
type: Integer
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, delete_cluster,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/delete_cluster/sample.ipynb
@@ -75,7 +75,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_delete_cluster_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/delete_cluster/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/delete_cluster/component.yaml')\n",
"help(dataproc_delete_cluster_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_hadoop_job/README.md
@@ -72,7 +72,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_submit_hadoop_job_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_hadoop_job/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hadoop_job/component.yaml')
help(dataproc_submit_hadoop_job_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_hadoop_job/component.yaml
@@ -78,7 +78,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, submit_hadoop_job,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_hadoop_job/sample.ipynb
@@ -90,7 +90,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n",
"help(dataproc_submit_hadoop_job_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_hive_job/README.md
@@ -63,7 +63,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_submit_hive_job_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_hive_job/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hive_job/component.yaml')
help(dataproc_submit_hive_job_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_hive_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, submit_hive_job,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_hive_job/sample.ipynb
@@ -81,7 +81,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_hive_job_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_hive_job/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hive_job/component.yaml')\n",
"help(dataproc_submit_hive_job_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_pig_job/README.md
@@ -66,7 +66,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_submit_pig_job_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_pig_job/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pig_job/component.yaml')
help(dataproc_submit_pig_job_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_pig_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, submit_pig_job,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_pig_job/sample.ipynb
@@ -84,7 +84,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_pig_job_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_pig_job/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pig_job/component.yaml')\n",
"help(dataproc_submit_pig_job_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_pyspark_job/README.md
@@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_submit_pyspark_job_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_pyspark_job/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pyspark_job/component.yaml')
help(dataproc_submit_pyspark_job_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_pyspark_job/component.yaml
@@ -67,7 +67,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, submit_pyspark_job,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_pyspark_job/sample.ipynb
@@ -86,7 +86,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n",
"help(dataproc_submit_pyspark_job_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_spark_job/README.md
@@ -80,7 +80,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_submit_spark_job_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_spark_job/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_spark_job/component.yaml')
help(dataproc_submit_spark_job_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_spark_job/component.yaml
@@ -74,7 +74,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, submit_spark_job,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_spark_job/sample.ipynb
@@ -99,7 +99,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_spark_job_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_spark_job/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_spark_job/component.yaml')\n",
"help(dataproc_submit_spark_job_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_sparksql_job/README.md
@@ -62,7 +62,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

dataproc_submit_sparksql_job_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_sparksql_job/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_sparksql_job/component.yaml')
help(dataproc_submit_sparksql_job_op)
```

2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_sparksql_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.dataproc, submit_sparksql_job,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/dataproc/submit_sparksql_job/sample.ipynb
@@ -81,7 +81,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n",
"help(dataproc_submit_sparksql_job_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/ml_engine/batch_predict/README.md
@@ -94,7 +94,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

mlengine_batch_predict_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/ml_engine/batch_predict/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/batch_predict/component.yaml')
help(mlengine_batch_predict_op)
```

2 changes: 1 addition & 1 deletion components/gcp/ml_engine/batch_predict/component.yaml
@@ -67,7 +67,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.ml_engine, batch_predict,
--project_id, {inputValue: project_id},
2 changes: 1 addition & 1 deletion components/gcp/ml_engine/batch_predict/sample.ipynb
@@ -112,7 +112,7 @@
"import kfp.components as comp\n",
"\n",
"mlengine_batch_predict_op = comp.load_component_from_url(\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/ml_engine/batch_predict/component.yaml')\n",
" 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/batch_predict/component.yaml')\n",
"help(mlengine_batch_predict_op)"
]
},
2 changes: 1 addition & 1 deletion components/gcp/ml_engine/deploy/README.md
@@ -110,7 +110,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp

mlengine_deploy_op = comp.load_component_from_url(
-'https://raw.githubusercontent.com/kubeflow/pipelines/0b07e456b1f319d8b7a7301274f55c00fda9f537/components/gcp/ml_engine/deploy/component.yaml')
+'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/deploy/component.yaml')
help(mlengine_deploy_op)
```

2 changes: 1 addition & 1 deletion components/gcp/ml_engine/deploy/component.yaml
@@ -93,7 +93,7 @@ outputs:
type: String
implementation:
container:
-image: gcr.io/ml-pipeline/ml-pipeline-gcp:fe639f41661d8e17fcda64ff8242127620b80ba0
+image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
args: [
kfp_component.google.ml_engine, deploy,
--model_uri, {inputValue: model_uri},
Expand Down

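Because this release changes the same image tag across 58 files, a natural sanity check is to confirm that no component.yaml was missed. The sketch below is a hypothetical helper, not part of this commit or the repository's release tooling; it assumes it is run from the repository root, and its tag-matching pattern is based only on the image references shown in this diff.

```python
import pathlib
import re

# Hypothetical consistency check (not part of this commit): report any
# component.yaml under components/ that still pins an image tag other than
# the one introduced by this release.
EXPECTED_TAG = "0517114dc2b365a4a6d95424af6157ead774eff3"
IMAGE_RE = re.compile(r"image:\s*gcr\.io/ml-pipeline/[\w.-]+:([0-9a-f]{40})")

def find_stale_components(root="components"):
    stale = []
    for path in pathlib.Path(root).rglob("component.yaml"):
        for match in IMAGE_RE.finditer(path.read_text()):
            if match.group(1) != EXPECTED_TAG:
                stale.append((str(path), match.group(1)))
    return stale

if __name__ == "__main__":
    for path, tag in find_stale_components():
        print("{} still pins {}".format(path, tag))
```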