chore(components): GCPC 2.14.1 Release
Signed-off-by: Googler <nobody@google.com>
PiperOrigin-RevId: 632255798
Googler committed May 10, 2024
1 parent 7c63599 commit 5a7b39a
Showing 10 changed files with 40 additions and 20 deletions.
2 changes: 1 addition & 1 deletion components/google-cloud/Dockerfile
@@ -44,7 +44,7 @@ RUN pip3 install -U "fsspec>=0.7.4" "gcsfs>=0.6.0" "pandas<=1.3.5" "scikit-learn
RUN pip3 install -U google-cloud-notebooks

# Install main package
RUN pip3 install "git+https://github.com/kubeflow/pipelines.git@google-cloud-pipeline-components-2.14.0#egg=google-cloud-pipeline-components&subdirectory=components/google-cloud"
RUN pip3 install "git+https://github.com/kubeflow/pipelines.git@google-cloud-pipeline-components-2.14.1#egg=google-cloud-pipeline-components&subdirectory=components/google-cloud"

# Note that components can override the container entry point.
ENTRYPOINT ["python3","-m","google_cloud_pipeline_components.container.v1.aiplatform.remote_runner"]
5 changes: 4 additions & 1 deletion components/google-cloud/RELEASE.md
@@ -1,7 +1,10 @@
## Upcoming release

-## Release 2.15.0
+## Release 2.14.1
* Add staging and temp location parameters to prophet trainer component.
* Add input parameter `autorater_prompt_parameters` to `_implementation.llm.online_evaluation_pairwise` component.
* Mitigate a bug in `v1.model_evaluation.autosxs_pipeline`, where batch prediction would fail the first time it is run in a project, by retrying.
* Apply latest GCPC image vulnerability resolutions (base OS and software updates).

## Release 2.14.0
* Use larger base reward model when tuning `text-bison@001`, `chat-bison@001` and `t5-xxl` with the `preview.llm.rlhf_pipeline`.
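The autosxs bullet above describes mitigating a first-run batch prediction failure by retrying; the pipeline code itself is not visible in this commit. As an illustrative sketch only, not the component's actual implementation, a bounded retry in Python looks like this:

```python
import time


def call_with_retry(fn, attempts: int = 2, delay_seconds: float = 30.0):
    """Run `fn`, retrying on failure up to `attempts` total tries.

    Illustrative only: the actual mitigation is wired into the AutoSxS
    pipeline, not exposed as a helper like this.
    """
    last_error = None
    for attempt in range(attempts):
        try:
            return fn()
        except Exception as err:  # real code would catch a narrower error type
            last_error = err
            if attempt + 1 < attempts:
                time.sleep(delay_seconds)
    raise last_error
```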
2 changes: 1 addition & 1 deletion components/google-cloud/docs/.readthedocs.yml
@@ -11,4 +11,4 @@ python:
build:
  os: ubuntu-22.04
  tools:
-    python: "3.7"
+    python: "3.8"
5 changes: 5 additions & 0 deletions components/google-cloud/docs/source/versions.json
@@ -1,4 +1,9 @@
[
+  {
+    "version": "https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-2.14.1",
+    "title": "2.14.1",
+    "aliases": []
+  },
  {
    "version": "https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-2.14.0",
    "title": "2.14.0",
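The versions.json change prepends a docs entry for 2.14.1 so the newest release is listed first. A minimal sketch of scripting that edit (the helper and file path below are hypothetical, not part of the repository):

```python
import json

DOCS_URL = (
    "https://google-cloud-pipeline-components.readthedocs.io/en/"
    "google-cloud-pipeline-components-{version}"
)


def add_docs_version(path: str, version: str) -> None:
    """Prepend a release entry so the newest version is listed first."""
    with open(path) as f:
        entries = json.load(f)
    entries.insert(0, {
        "version": DOCS_URL.format(version=version),
        "title": version,
        "aliases": [],
    })
    with open(path, "w") as f:
        json.dump(entries, f, indent=2)
        f.write("\n")


add_docs_version("docs/source/versions.json", "2.14.1")
```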
@@ -17,4 +17,4 @@
DO NOT EDIT - This file is generated, manual changes will be overridden.
"""

-IMAGE_TAG = '20240506_1530_RC00'
+IMAGE_TAG = '20240506_1707'
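This generated constant pins the container tag used by the LLM implementation components. A hedged sketch of how such a constant is typically consumed; the registry path below is a made-up placeholder, and only `IMAGE_TAG` comes from the diff:

```python
# From the generated module (the real file warns: DO NOT EDIT).
IMAGE_TAG = '20240506_1707'

# Hypothetical consumer; the actual image repository is not shown in this commit.
image_uri = f'us-docker.pkg.dev/example-project/example-repo/llm:{IMAGE_TAG}'
```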
@@ -18,7 +18,6 @@
from google_cloud_pipeline_components import _placeholders
from google_cloud_pipeline_components._implementation.llm import bulk_inferrer
from google_cloud_pipeline_components._implementation.llm import env
-from google_cloud_pipeline_components._implementation.llm import function_based
from google_cloud_pipeline_components._implementation.llm import infer_preprocessor
from google_cloud_pipeline_components._implementation.llm import preprocess_chat_dataset
from google_cloud_pipeline_components._implementation.llm import private_text_importer
@@ -1461,7 +1461,7 @@ deploymentSpec:
\ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\
\ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \
\ ref.project, ref.dataset_id)\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-bigquery-delete-dataset-with-prefix:
container:
args:
@@ -1495,7 +1495,7 @@ deploymentSpec:
\ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\
\ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\
\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-bigquery-query-job:
container:
args:
@@ -1583,7 +1583,7 @@ deploymentSpec:
\ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
\ if write_disposition:\n config['write_disposition'] = write_disposition\n\
\ return config\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-build-job-configuration-query-2:
container:
args:
@@ -1617,7 +1617,7 @@ deploymentSpec:
\ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
\ if write_disposition:\n config['write_disposition'] = write_disposition\n\
\ return config\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-get-first-valid:
container:
args:
@@ -1641,7 +1641,7 @@ deploymentSpec:
\ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\
\n for value in json.loads(values):\n if value:\n return value\n\
\ raise ValueError('No valid values.')\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-get-table-location:
container:
args:
@@ -1677,7 +1677,7 @@ deploymentSpec:
\ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
\ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
\ return client.get_table(table).location\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-get-table-location-2:
container:
args:
@@ -1713,7 +1713,7 @@ deploymentSpec:
\ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
\ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
\ return client.get_table(table).location\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-load-table-from-uri:
container:
args:
@@ -1754,7 +1754,7 @@ deploymentSpec:
\ source_format=source_format)\n client.load_table_from_uri(\n source_uris=csv_list,\n\
\ destination=destination,\n project=project,\n location=location,\n\
\ job_config=job_config).result()\n return destination\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-make-vertex-model-artifact:
container:
args:
@@ -1778,7 +1778,7 @@ deploymentSpec:
Creates a google.VertexModel artifact.\"\"\"\n vertex_model.metadata =\
\ {'resourceName': model_resource_name}\n vertex_model.uri = (f'https://{location}-aiplatform.googleapis.com'\n\
\ f'/v1/{model_resource_name}')\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-maybe-replace-with-default:
container:
args:
@@ -1800,7 +1800,7 @@ deploymentSpec:
\ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\
\ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\
\n return default if not value else value\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-model-batch-predict:
container:
args:
@@ -1879,7 +1879,7 @@ deploymentSpec:
\ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\
\ return collections.namedtuple(\n 'Outputs',\n ['project_id',\
\ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-table-to-uri-2:
container:
args:
@@ -1909,7 +1909,7 @@ deploymentSpec:
\ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\
\ return collections.namedtuple(\n 'Outputs',\n ['project_id',\
\ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-validate-inputs:
container:
args:
@@ -2005,7 +2005,7 @@ deploymentSpec:
\ raise ValueError(\n 'Granularity unit should be one of the\
\ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\
\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
pipelineInfo:
description: Creates a batch prediction using a Prophet model.
name: prophet-predict
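Every `exec-*` container in this compiled prophet-predict spec moves from the `20240214_1325` base image to `20240419_0625`. One way to confirm a compiled spec is fully bumped is to list its distinct images; a sketch assuming PyYAML is installed and using an illustrative file name:

```python
import yaml


def pipeline_images(spec_path: str) -> set:
    """Collect the distinct container images from a compiled KFP v2 spec."""
    with open(spec_path) as f:
        spec = yaml.safe_load(f)
    executors = spec.get('deploymentSpec', {}).get('executors', {})
    return {
        executor['container']['image']
        for executor in executors.values()
        if 'container' in executor and 'image' in executor['container']
    }


# Expect only kfp-v2-base:20240419_0625 among the base images after this change.
print(pipeline_images('prophet_predict_pipeline.yaml'))
```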
@@ -180,6 +180,16 @@ def prophet_trainer(
'--dataflow_use_public_ips=',
dataflow_use_public_ips,
'", "',
+'--dataflow_staging_dir=',
+root_dir,
+(
+    f'/{dsl.PIPELINE_JOB_ID_PLACEHOLDER}/{dsl.PIPELINE_TASK_ID_PLACEHOLDER}/dataflow_staging", "'
+),
+'--dataflow_tmp_dir=',
+root_dir,
+(
+    f'/{dsl.PIPELINE_JOB_ID_PLACEHOLDER}/{dsl.PIPELINE_TASK_ID_PLACEHOLDER}/dataflow_tmp", "'
+),
'--gcp_resources_path=',
gcp_resources,
'", "',
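The ten added lines pass two new Dataflow arguments that nest staging and temp data under the pipeline's `root_dir`, keyed by job and task IDs so concurrent runs do not share scratch directories. At runtime the `dsl` placeholders resolve to UUIDs, so the flags expand roughly as below (example values stand in for resolved placeholders):

```python
# Example stand-ins for values KFP resolves at runtime.
root_dir = 'gs://my-bucket/pipeline-root'  # the pipeline's root_dir input
job_id = 'pipeline-job-uuid'               # dsl.PIPELINE_JOB_ID_PLACEHOLDER
task_id = 'pipeline-task-uuid'             # dsl.PIPELINE_TASK_ID_PLACEHOLDER

trainer_args = [
    f'--dataflow_staging_dir={root_dir}/{job_id}/{task_id}/dataflow_staging',
    f'--dataflow_tmp_dir={root_dir}/{job_id}/{task_id}/dataflow_tmp',
]
```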
@@ -2418,7 +2418,10 @@ deploymentSpec:
"\", \"", "--dataflow_service_account=", "{{$.inputs.parameters[''dataflow_service_account'']}}",
"\", \"", "--dataflow_subnetwork=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}",
"\", \"", "--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}",
"\", \"", "--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}",
"\", \"", "--dataflow_staging_dir=", "{{$.inputs.parameters[''root_dir'']}}",
"/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging\", \"",
"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\",
\"", "--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}",
"\", \"", "--executor_input={{$.json_escape[1]}}\"]}}]}}"]}'
command:
- python3
@@ -13,4 +13,4 @@
# limitations under the License.
"""Google Cloud Pipeline Components version."""

__version__ = "2.14.0"
__version__ = "2.14.1"
