fix(components): Add staging and temp locations to prophet trainer component

Signed-off-by: Michael Hu <humichael@google.com>
PiperOrigin-RevId: 631958163
TheMichaelHu authored and Google Cloud Pipeline Components maintainers committed May 10, 2024
1 parent 5d9f4ab commit 00440f7
Showing 4 changed files with 28 additions and 14 deletions.
1 change: 1 addition & 0 deletions components/google-cloud/RELEASE.md
@@ -1,6 +1,7 @@
## Upcoming release

## Release 2.15.0
+* Add staging and temp location parameters to prophet trainer component.
* Add input parameter `autorater_prompt_parameters` to `_implementation.llm.online_evaluation_pairwise` component.
* Mitigate bug in `v1.model_evaluation.autosxs_pipeline` where batch prediction would fail the first time it is run in a project by retrying.

@@ -1461,7 +1461,7 @@ deploymentSpec:
\ = client.create_dataset(dataset=dataset, exists_ok=exists_ok)\n return\
\ collections.namedtuple('Outputs', ['project_id', 'dataset_id'])(\n \
\ ref.project, ref.dataset_id)\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-bigquery-delete-dataset-with-prefix:
container:
args:
@@ -1495,7 +1495,7 @@ deploymentSpec:
\ if dataset.dataset_id.startswith(dataset_prefix):\n client.delete_dataset(\n\
\ dataset=dataset.dataset_id,\n delete_contents=delete_contents)\n\
\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-bigquery-query-job:
container:
args:
@@ -1583,7 +1583,7 @@ deploymentSpec:
\ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
\ if write_disposition:\n config['write_disposition'] = write_disposition\n\
\ return config\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-build-job-configuration-query-2:
container:
args:
@@ -1617,7 +1617,7 @@ deploymentSpec:
\ 'datasetId': dataset_id,\n 'tableId': table_id,\n }\n\
\ if write_disposition:\n config['write_disposition'] = write_disposition\n\
\ return config\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-get-first-valid:
container:
args:
@@ -1641,7 +1641,7 @@
\ import json\n # pylint: enable=g-import-not-at-top,import-outside-toplevel,redefined-outer-name,reimported\n\
\n for value in json.loads(values):\n if value:\n return value\n\
\ raise ValueError('No valid values.')\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-get-table-location:
container:
args:
@@ -1677,7 +1677,7 @@ deploymentSpec:
\ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
\ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
\ return client.get_table(table).location\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-get-table-location-2:
container:
args:
@@ -1713,7 +1713,7 @@ deploymentSpec:
\ if table.startswith('bq://'):\n table = table[len('bq://'):]\n elif\
\ table.startswith('bigquery://'):\n table = table[len('bigquery://'):]\n\
\ return client.get_table(table).location\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-load-table-from-uri:
container:
args:
@@ -1754,7 +1754,7 @@ deploymentSpec:
\ source_format=source_format)\n client.load_table_from_uri(\n source_uris=csv_list,\n\
\ destination=destination,\n project=project,\n location=location,\n\
\ job_config=job_config).result()\n return destination\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-make-vertex-model-artifact:
container:
args:
@@ -1778,7 +1778,7 @@
Creates a google.VertexModel artifact.\"\"\"\n vertex_model.metadata =\
\ {'resourceName': model_resource_name}\n vertex_model.uri = (f'https://{location}-aiplatform.googleapis.com'\n\
\ f'/v1/{model_resource_name}')\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-maybe-replace-with-default:
container:
args:
@@ -1800,7 +1800,7 @@
\ *\n\ndef maybe_replace_with_default(value: str, default: str = '') ->\
\ str:\n \"\"\"Replaces string with another value if it is a dash.\"\"\"\
\n return default if not value else value\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-model-batch-predict:
container:
args:
@@ -1879,7 +1879,7 @@ deploymentSpec:
\ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\
\ return collections.namedtuple(\n 'Outputs',\n ['project_id',\
\ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-table-to-uri-2:
container:
args:
@@ -1909,7 +1909,7 @@ deploymentSpec:
\ if use_bq_prefix:\n bq_uri = 'bq://' + bq_uri\n outputs.append(bq_uri)\n\
\ return collections.namedtuple(\n 'Outputs',\n ['project_id',\
\ 'dataset_id', 'table_id', 'uri'],\n )(*outputs)\n\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
exec-validate-inputs:
container:
args:
@@ -2005,7 +2005,7 @@ deploymentSpec:
\ raise ValueError(\n 'Granularity unit should be one of the\
\ following: '\n f'{valid_data_granularity_units}, got: {data_granularity_unit}.')\n\
\n"
-image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240214_1325
+image: us-docker.pkg.dev/vertex-ai/automl-tabular/kfp-v2-base:20240419_0625
pipelineInfo:
description: Creates a batch prediction using a Prophet model.
name: prophet-predict
@@ -180,6 +180,16 @@ def prophet_trainer(
'--dataflow_use_public_ips=',
dataflow_use_public_ips,
'", "',
+'--dataflow_staging_dir=',
+root_dir,
+(
+    f'/{dsl.PIPELINE_JOB_ID_PLACEHOLDER}/{dsl.PIPELINE_TASK_ID_PLACEHOLDER}/dataflow_staging", "'
+),
+'--dataflow_tmp_dir=',
+root_dir,
+(
+    f'/{dsl.PIPELINE_JOB_ID_PLACEHOLDER}/{dsl.PIPELINE_TASK_ID_PLACEHOLDER}/dataflow_tmp", "'
+),
'--gcp_resources_path=',
gcp_resources,
'", "',
@@ -2418,7 +2418,10 @@ deploymentSpec:
"\", \"", "--dataflow_service_account=", "{{$.inputs.parameters[''dataflow_service_account'']}}",
"\", \"", "--dataflow_subnetwork=", "{{$.inputs.parameters[''dataflow_subnetwork'']}}",
"\", \"", "--dataflow_use_public_ips=", "{{$.inputs.parameters[''dataflow_use_public_ips'']}}",
"\", \"", "--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}",
"\", \"", "--dataflow_staging_dir=", "{{$.inputs.parameters[''root_dir'']}}",
"/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging\", \"",
"--dataflow_tmp_dir=", "{{$.inputs.parameters[''root_dir'']}}", "/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp\",
\"", "--gcp_resources_path=", "{{$.outputs.parameters[''gcp_resources''].output_file}}",
"\", \"", "--executor_input={{$.json_escape[1]}}\"]}}]}}"]}'
command:
- python3
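
As a side note, the stray-looking `'", "'` fragments in the Python source above line up with this hunk: the compiler concatenates the component's arg fragments into the single JSON CustomJob payload shown here, so each `", "` closes one entry of the container `args` array and opens the next. A hedged, stdlib-only illustration with made-up values:

```python
# Illustration only: joining fragments the way the compiled spec suggests.
fragments = [
    '"--dataflow_staging_dir=', 'gs://my-root',  # made-up root_dir value
    '/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging", "',
    '--dataflow_tmp_dir=', 'gs://my-root',
    '/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp", "',
    '--gcp_resources_path=', '/tmp/gcp_resources', '"',  # made-up path
]
print(''.join(fragments))
# Output (one line, wrapped here for readability):
# "--dataflow_staging_dir=gs://my-root/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_staging",
# "--dataflow_tmp_dir=gs://my-root/{{$.pipeline_job_uuid}}/{{$.pipeline_task_uuid}}/dataflow_tmp",
# "--gcp_resources_path=/tmp/gcp_resources"
# i.e. well-formed, comma-separated entries of the JSON args array.
```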