
Commit

feat: add annotation_labels to ImportDataConfig in aiplatform v1 dataset.proto

feat: add start_time to BatchReadFeatureValuesRequest in aiplatform v1 featurestore_service.proto
feat: add metadata_artifact to Model in aiplatform v1 model.proto
feat: add failed_main_jobs and failed_pre_caching_check_jobs to ContainerDetail in aiplatform v1 pipeline_job.proto
feat: add persist_ml_use_assignment to InputDataConfig in aiplatform v1 training_pipeline.proto

PiperOrigin-RevId: 485963171
Google APIs authored and copybara-github committed Nov 3, 2022
1 parent af14709 commit 9691f51
Showing 13 changed files with 61 additions and 18 deletions.
1 change: 1 addition & 0 deletions google/cloud/aiplatform/v1/artifact.proto
@@ -101,6 +101,7 @@ message Artifact {
string schema_version = 15;

// Properties of the Artifact.
// Top-level metadata keys' leading and trailing spaces will be trimmed.
// The size of this field should not exceed 200KB.
google.protobuf.Struct metadata = 16;

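For context, a minimal sketch of how the trimming rule above plays out when writing Artifact metadata through the generated Python client (google-cloud-aiplatform); the project, metadata store, key names, and values are placeholders:

from google.cloud import aiplatform_v1

client = aiplatform_v1.MetadataServiceClient()
artifact = aiplatform_v1.Artifact(
    display_name="training-dataset",
    # Per the comment above, the service trims leading/trailing spaces from
    # top-level keys, so " row_count " is stored as "row_count"; the whole
    # metadata Struct must stay under 200KB.
    metadata={" row_count ": 1000, "source": "gs://example-bucket/data.csv"},
)
created = client.create_artifact(
    parent="projects/PROJECT/locations/us-central1/metadataStores/default",
    artifact=artifact,
    artifact_id="training-dataset-artifact",
)
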
1 change: 1 addition & 0 deletions google/cloud/aiplatform/v1/context.proto
@@ -86,6 +86,7 @@ message Context {
string schema_version = 14;

// Properties of the Context.
// Top-level metadata keys' leading and trailing spaces will be trimmed.
// The size of this field should not exceed 200KB.
google.protobuf.Struct metadata = 15;

9 changes: 9 additions & 0 deletions google/cloud/aiplatform/v1/dataset.proto
@@ -112,6 +112,15 @@ message ImportDataConfig {
// file referenced by [import_schema_uri][google.cloud.aiplatform.v1.ImportDataConfig.import_schema_uri], e.g. jsonl file.
map<string, string> data_item_labels = 2;

// Labels that will be applied to newly imported Annotations. If two
// Annotations are identical, one of them will be deduplicated. Two Annotations
// are considered identical if their [payload][google.cloud.aiplatform.v1.Annotation.payload],
// [payload_schema_uri][google.cloud.aiplatform.v1.Annotation.payload_schema_uri], and all of their
// [labels][google.cloud.aiplatform.v1.Annotation.labels] are the same.
// These labels will be overridden by Annotation labels specified inside the
// index file referenced by [import_schema_uri][google.cloud.aiplatform.v1.ImportDataConfig.import_schema_uri], e.g. a JSONL file.
map<string, string> annotation_labels = 3;

// Required. Points to a YAML file stored on Google Cloud Storage describing the import
// format. Validation will be done against the schema. The schema is defined
// as an [OpenAPI 3.0.2 Schema
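As an illustration of the new annotation_labels field, a minimal sketch using the generated Python client (google-cloud-aiplatform), assuming a release built from these protos; the bucket, schema URI, dataset name, and label values are placeholders:

from google.cloud import aiplatform_v1

client = aiplatform_v1.DatasetServiceClient()
config = aiplatform_v1.ImportDataConfig(
    gcs_source=aiplatform_v1.GcsSource(uris=["gs://example-bucket/import.jsonl"]),
    import_schema_uri="gs://example-bucket/schema/io_format_1.0.0.yaml",
    # Applied to every newly imported Annotation; labels in the index file
    # referenced by import_schema_uri override these.
    annotation_labels={"import-batch": "nov-2022"},
)
operation = client.import_data(
    name="projects/PROJECT/locations/us-central1/datasets/DATASET_ID",
    import_configs=[config],
)
operation.result()  # ImportData is a long-running operation
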
1 change: 1 addition & 0 deletions google/cloud/aiplatform/v1/execution.proto
@@ -107,6 +107,7 @@ message Execution {
string schema_version = 14;

// Properties of the Execution.
// Top-level metadata keys' leading and trailing spaces will be trimmed.
// The size of this field should not exceed 200KB.
google.protobuf.Struct metadata = 15;

6 changes: 4 additions & 2 deletions google/cloud/aiplatform/v1/featurestore.proto
@@ -116,8 +116,10 @@ message Featurestore {
// and are immutable.
map<string, string> labels = 6 [(google.api.field_behavior) = OPTIONAL];

// Optional. Config for online storage resources. If unset, the featurestore will
// not have an online store and cannot be used for online serving.
// Optional. Config for online storage resources. This field should not be set
// together with `OnlineStoreReplicationConfig`. If both are unset, the
// featurestore will not have an online store and cannot be used for online
// serving.
OnlineServingConfig online_serving_config = 7 [(google.api.field_behavior) = OPTIONAL];

// Output only. State of the featurestore.
7 changes: 6 additions & 1 deletion google/cloud/aiplatform/v1/featurestore_service.proto
@@ -614,6 +614,11 @@ message BatchReadFeatureValuesRequest {
// specifying entity IDs in the EntityType in
// [BatchReadFeatureValuesRequest.request][].
repeated EntityTypeSpec entity_type_specs = 7 [(google.api.field_behavior) = REQUIRED];

// Optional. Excludes Feature values with a feature generation timestamp before this
// timestamp. If not set, the oldest values kept in the Feature Store are retrieved.
// The timestamp, if present, must not have higher than millisecond precision.
google.protobuf.Timestamp start_time = 11 [(google.api.field_behavior) = OPTIONAL];
}

// Request message for [FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ExportFeatureValues].
@@ -898,7 +903,7 @@ message CreateFeatureRequest {
// Required. The ID to use for the Feature, which will become the final component of
// the Feature's resource name.
//
// This value may be up to 60 characters, and valid characters are
// This value may be up to 128 characters, and valid characters are
// `[a-z0-9_]`. The first character cannot be a number.
//
// The value must be unique within an EntityType.
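To illustrate the new start_time field, a minimal sketch of a batch read using the generated Python client (google-cloud-aiplatform), assuming a release built from these protos; all resource names, URIs, entity type IDs, and feature IDs are placeholders:

import datetime

from google.cloud import aiplatform_v1
from google.protobuf import timestamp_pb2

client = aiplatform_v1.FeaturestoreServiceClient()

start = timestamp_pb2.Timestamp()
start.FromDatetime(datetime.datetime(2022, 11, 1, tzinfo=datetime.timezone.utc))

request = aiplatform_v1.BatchReadFeatureValuesRequest(
    featurestore="projects/PROJECT/locations/us-central1/featurestores/FEATURESTORE",
    csv_read_instances=aiplatform_v1.CsvSource(
        gcs_source=aiplatform_v1.GcsSource(uris=["gs://example-bucket/read_instances.csv"])
    ),
    destination=aiplatform_v1.FeatureValueDestination(
        bigquery_destination=aiplatform_v1.BigQueryDestination(
            output_uri="bq://PROJECT.dataset.table"
        )
    ),
    entity_type_specs=[
        aiplatform_v1.BatchReadFeatureValuesRequest.EntityTypeSpec(
            entity_type_id="users",
            feature_selector=aiplatform_v1.FeatureSelector(
                id_matcher=aiplatform_v1.IdMatcher(ids=["age", "country"])
            ),
        )
    ],
    # Values generated before this timestamp are excluded; omit to read the
    # oldest values retained in the featurestore (millisecond precision max).
    start_time=start,
)
operation = client.batch_read_feature_values(request=request)
operation.result()
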
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/v1/index_endpoint.proto
@@ -85,7 +85,7 @@ message IndexEndpoint {
// are mutually exclusive.
//
// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert):
// projects/{project}/global/networks/{network}.
// `projects/{project}/global/networks/{network}`.
// Where {project} is a project number, as in '12345', and {network} is the
// network name.
string network = 9 [(google.api.field_behavior) = OPTIONAL];
13 changes: 9 additions & 4 deletions google/cloud/aiplatform/v1/metadata_service.proto
@@ -556,7 +556,8 @@ message ListArtifactsRequest {
// `in_context("projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context-id>")`
//
// Each of the above supported filter types can be combined together using
// logical operators (`AND` & `OR`).
// logical operators (`AND` & `OR`). Maximum nested expression depth allowed
// is 5.
//
// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
string filter = 4;
@@ -750,7 +751,8 @@ message ListContextsRequest {
// ```
//
// Each of the above supported filters can be combined together using
// logical operators (`AND` & `OR`).
// logical operators (`AND` & `OR`). Maximum nested expression depth allowed
// is 5.
//
// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
string filter = 4;
@@ -1048,7 +1050,9 @@ message ListExecutionsRequest {
// `in_context("projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context-id>")`
//
// Each of the above supported filters can be combined together using
// logical operators (`AND` & `OR`).
// logical operators (`AND` & `OR`). Maximum nested expression depth allowed
// is 5.
//
// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
string filter = 4;

@@ -1310,7 +1314,8 @@ message QueryArtifactLineageSubgraphRequest {
// For example: `metadata.field_1.number_value = 10.0`
//
// Each of the above supported filter types can be combined together using
// logical operators (`AND` & `OR`).
// logical operators (`AND` & `OR`). Maximum nested expression depth allowed
// is 5.
//
// For example: `display_name = "test" AND metadata.field1.bool_value = true`.
string filter = 3;
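A small sketch of a combined filter against one of these RPCs, using the generated Python client (google-cloud-aiplatform); the parent path and metadata field names are placeholders:

from google.cloud import aiplatform_v1

client = aiplatform_v1.MetadataServiceClient()
request = aiplatform_v1.ListArtifactsRequest(
    parent="projects/PROJECT/locations/us-central1/metadataStores/default",
    # Two supported filter types joined with logical operators; the grouping
    # stays well inside the documented nesting limit of 5.
    filter='(display_name = "test" OR display_name = "prod") AND metadata.field1.bool_value = true',
)
for artifact in client.list_artifacts(request=request):
    print(artifact.name)
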
9 changes: 7 additions & 2 deletions google/cloud/aiplatform/v1/model.proto
@@ -118,9 +118,9 @@ message Model {

// User provided version aliases so that a model version can be referenced via
// alias (i.e.
// projects/{project}/locations/{location}/models/{model_id}@{version_alias}
// `projects/{project}/locations/{location}/models/{model_id}@{version_alias}`
// instead of auto-generated version id (i.e.
// projects/{project}/locations/{location}/models/{model_id}@{version_id}).
// `projects/{project}/locations/{location}/models/{model_id}@{version_id})`.
// The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9] to distinguish from
// version_id. A default version alias will be created for the first version
// of the model, and there must be exactly one default version alias for a
@@ -332,6 +332,11 @@ message Model {
// Output only. Source of a model. It can be an AutoML training pipeline, a custom
// training pipeline, BigQuery ML, or an existing Vertex AI Model.
ModelSourceInfo model_source_info = 38 [(google.api.field_behavior) = OUTPUT_ONLY];

// Output only. The resource name of the Artifact that was created in MetadataStore when
// creating the Model. The Artifact resource name pattern is
// `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`.
string metadata_artifact = 44 [(google.api.field_behavior) = OUTPUT_ONLY];
}

// Contains the schemata used in Model's predictions and explanations via
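A minimal sketch of reading the new metadata_artifact field with the generated Python client (google-cloud-aiplatform), assuming a release built from these protos; the model resource name is a placeholder:

from google.cloud import aiplatform_v1

client = aiplatform_v1.ModelServiceClient()
model = client.get_model(
    name="projects/PROJECT/locations/us-central1/models/MODEL_ID"
)
# Output only: the MetadataStore Artifact created alongside the Model, e.g.
# projects/.../locations/.../metadataStores/default/artifacts/ARTIFACT_ID
print(model.metadata_artifact)
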
15 changes: 13 additions & 2 deletions google/cloud/aiplatform/v1/pipeline_job.proto
@@ -54,8 +54,8 @@ message PipelineJob {
message InputArtifact {
oneof kind {
// Artifact resource ID from MLMD, which is the last portion of an
// artifact resource
// name(projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}).
// artifact resource name:
// `projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}`.
// The artifact must stay within the same project, location and default
// metadatastore as the pipeline.
string artifact_id = 1;
@@ -337,6 +337,17 @@ message PipelineTaskExecutorDetail {
type: "aiplatform.googleapis.com/CustomJob"
}
];

// Output only. The names of the previously failed [CustomJob][google.cloud.aiplatform.v1.CustomJob] for the main container
// executions. The list includes all attempts in chronological order.
repeated string failed_main_jobs = 3 [(google.api.field_behavior) = OUTPUT_ONLY];

// Output only. The names of the previously failed [CustomJob][google.cloud.aiplatform.v1.CustomJob] for the
// pre-caching-check container executions. This job will be available if the
// [PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec] specifies the `pre_caching_check` hook in
// the lifecycle events.
// The list includes all attempts in chronological order.
repeated string failed_pre_caching_check_jobs = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
}

// The detailed info for a custom job executor.
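A minimal sketch of inspecting the two new output-only lists with the generated Python client (google-cloud-aiplatform), assuming a release built from these protos; the pipeline job name is a placeholder:

from google.cloud import aiplatform_v1

client = aiplatform_v1.PipelineServiceClient()
job = client.get_pipeline_job(
    name="projects/PROJECT/locations/us-central1/pipelineJobs/PIPELINE_JOB_ID"
)
for task in job.job_detail.task_details:
    detail = task.executor_detail.container_detail
    # Both lists hold CustomJob resource names for earlier failed attempts,
    # in chronological order.
    if detail.failed_main_jobs:
        print(task.task_name, "failed main jobs:", list(detail.failed_main_jobs))
    if detail.failed_pre_caching_check_jobs:
        print(task.task_name, "failed pre-caching checks:",
              list(detail.failed_pre_caching_check_jobs))
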
8 changes: 4 additions & 4 deletions google/cloud/aiplatform/v1/tensorboard_service.proto
@@ -467,7 +467,7 @@ message GetTensorboardExperimentRequest {
message ListTensorboardExperimentsRequest {
// Required. The resource name of the Tensorboard to list TensorboardExperiments.
// Format:
// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}'
// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
string parent = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
@@ -605,7 +605,7 @@ message GetTensorboardRunRequest {
message ReadTensorboardBlobDataRequest {
// Required. The resource name of the TensorboardTimeSeries to list Blobs.
// Format:
// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}'
// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
string time_series = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
@@ -627,7 +627,7 @@ message ReadTensorboardBlobDataResponse {
message ListTensorboardRunsRequest {
// Required. The resource name of the TensorboardExperiment to list TensorboardRuns.
// Format:
// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}'
// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
string parent = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
@@ -767,7 +767,7 @@ message GetTensorboardTimeSeriesRequest {
message ListTensorboardTimeSeriesRequest {
// Required. The resource name of the TensorboardRun to list TensorboardTimeSeries.
// Format:
// 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}'
// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
string parent = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
3 changes: 3 additions & 0 deletions google/cloud/aiplatform/v1/training_pipeline.proto
@@ -290,6 +290,9 @@ message InputDataConfig {
// Only one of [saved_query_id][google.cloud.aiplatform.v1.InputDataConfig.saved_query_id] and [annotation_schema_uri][google.cloud.aiplatform.v1.InputDataConfig.annotation_schema_uri] should be
// specified as both of them represent the same thing: problem type.
string saved_query_id = 7;

// Whether to persist the ML use assignment to data item system labels.
bool persist_ml_use_assignment = 11;
}

// Assigns the input data to training, validation, and test sets as per the
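A minimal sketch of setting the new persist_ml_use_assignment flag on a training pipeline's InputDataConfig via the generated Python client (google-cloud-aiplatform), assuming a release built from these protos; the training task definition, inputs, and resource names are placeholders:

from google.cloud import aiplatform_v1
from google.protobuf import json_format, struct_pb2

client = aiplatform_v1.PipelineServiceClient()
pipeline = aiplatform_v1.TrainingPipeline(
    display_name="example-training",
    training_task_definition="gs://example-bucket/schema/training_task_1.0.0.yaml",
    training_task_inputs=json_format.ParseDict({"exampleParam": "value"}, struct_pb2.Value()),
    input_data_config=aiplatform_v1.InputDataConfig(
        dataset_id="DATASET_ID",
        fraction_split=aiplatform_v1.FractionSplit(
            training_fraction=0.8, validation_fraction=0.1, test_fraction=0.1
        ),
        # Persist the computed training/validation/test assignment back to the
        # DataItems as system labels so later runs can reuse the same split.
        persist_ml_use_assignment=True,
    ),
    model_to_upload=aiplatform_v1.Model(display_name="example-model"),
)
created = client.create_training_pipeline(
    parent="projects/PROJECT/locations/us-central1",
    training_pipeline=pipeline,
)
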
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/v1/user_action_reference.proto
@@ -31,13 +31,13 @@ message UserActionReference {
// For API calls that return a long running operation.
// Resource name of the long running operation.
// Format:
// 'projects/{project}/locations/{location}/operations/{operation}'
// `projects/{project}/locations/{location}/operations/{operation}`
string operation = 1;

// For API calls that start a LabelingJob.
// Resource name of the LabelingJob.
// Format:
// 'projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}'
// `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
string data_labeling_job = 2;
}

