feat: [storagetransfer] support cross-bucket replication (#5891)
* feat: support cross-bucket replication

PiperOrigin-RevId: 707205525

Source-Link: googleapis/googleapis@67495ab

Source-Link: googleapis/googleapis-gen@9c2deac
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLXN0b3JhZ2V0cmFuc2Zlci8uT3dsQm90LnlhbWwiLCJoIjoiOWMyZGVhYzk0MGNlZDI4ZDY4YzgyOTZkNGFlYjEzMTM5ZDhhYzkxNCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] and gcf-owl-bot[bot] authored Dec 18, 2024
1 parent 5564d05 commit ad37361
Showing 9 changed files with 702 additions and 77 deletions.
4 changes: 2 additions & 2 deletions packages/google-storagetransfer/README.md
@@ -44,7 +44,7 @@ Google APIs Client Libraries, in [Client Libraries Explained][explained].
1. [Select or create a Cloud Platform project][projects].
1. [Enable billing for your project][billing].
1. [Enable the Storage Transfer Service API][enable_api].
1. [Set up authentication with a service account][auth] so you can access the
1. [Set up authentication][auth] so you can access the
API from your local workstation.

### Installing the client library
@@ -205,4 +205,4 @@ See [LICENSE](https://github.com/googleapis/google-cloud-node/blob/main/LICENSE)
[projects]: https://console.cloud.google.com/project
[billing]: https://support.google.com/cloud/answer/6293499#enable-billing
[enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=storagetransfer.googleapis.com
[auth]: https://cloud.google.com/docs/authentication/getting-started
[auth]: https://cloud.google.com/docs/authentication/external/set-up-adc-local
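
The updated `[auth]` link points to local Application Default Credentials (ADC) setup rather than service-account key files. As a minimal sketch (not part of this commit; the project ID is a placeholder), a client constructed without explicit credentials picks up ADC automatically:

```ts
import {StorageTransferServiceClient} from '@google-cloud/storage-transfer';

// With ADC configured locally, no key file or explicit credentials are needed.
const client = new StorageTransferServiceClient();

async function main() {
  // Placeholder project ID; returns the Google-managed service account that
  // Storage Transfer Service uses for this project.
  const [serviceAccount] = await client.getGoogleServiceAccount({
    projectId: 'my-project-id',
  });
  console.log(serviceAccount.accountEmail);
}

main().catch(console.error);
```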
packages/google-storagetransfer/protos/google/storagetransfer/v1/transfer.proto
@@ -24,7 +24,6 @@ import "google/protobuf/empty.proto";
import "google/protobuf/field_mask.proto";
import "google/storagetransfer/v1/transfer_types.proto";

option cc_enable_arenas = true;
option csharp_namespace = "Google.Cloud.StorageTransfer.V1";
option go_package = "cloud.google.com/go/storagetransfer/apiv1/storagetransferpb;storagetransferpb";
option java_outer_classname = "TransferProto";
@@ -254,17 +253,32 @@ message DeleteTransferJobRequest {
// be specified when listing transfer jobs.
message ListTransferJobsRequest {
// Required. A list of query parameters specified as JSON text in the form of:
// `{"projectId":"my_project_id",
// "jobNames":["jobid1","jobid2",...],
// "jobStatuses":["status1","status2",...]}`
//
// Since `jobNames` and `jobStatuses` support multiple values, their values
// must be specified with array notation. `projectId` is required.
// `jobNames` and `jobStatuses` are optional. The valid values for
// `jobStatuses` are case-insensitive:
// [ENABLED][google.storagetransfer.v1.TransferJob.Status.ENABLED],
// [DISABLED][google.storagetransfer.v1.TransferJob.Status.DISABLED], and
// [DELETED][google.storagetransfer.v1.TransferJob.Status.DELETED].
// ```
// {
// "projectId":"my_project_id",
// "jobNames":["jobid1","jobid2",...],
// "jobStatuses":["status1","status2",...],
// "dataBackend":"QUERY_REPLICATION_CONFIGS",
// "sourceBucket":"source-bucket-name",
// "sinkBucket":"sink-bucket-name",
// }
// ```
//
// The JSON formatting in the example is for display only; provide the
// query parameters without spaces or line breaks.
//
// * `projectId` is required.
// * Since `jobNames` and `jobStatuses` support multiple values, their values
// must be specified with array notation. `jobNames` and `jobStatuses` are
// optional. Valid values are case-insensitive:
// * [ENABLED][google.storagetransfer.v1.TransferJob.Status.ENABLED]
// * [DISABLED][google.storagetransfer.v1.TransferJob.Status.DISABLED]
// * [DELETED][google.storagetransfer.v1.TransferJob.Status.DELETED]
// * Specify `"dataBackend":"QUERY_REPLICATION_CONFIGS"` to return a list of
// cross-bucket replication jobs.
// * Limit the results to jobs from a particular bucket with `sourceBucket`
// and/or to a particular bucket with `sinkBucket`.
string filter = 1 [(google.api.field_behavior) = REQUIRED];

// The list page size. The max allowed value is 256.
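
As a sketch of how the expanded `filter` documented above might be used from the Node.js client (not part of this commit; project and bucket names are placeholders), listing cross-bucket replication jobs could look like:

```ts
import {StorageTransferServiceClient} from '@google-cloud/storage-transfer';

const client = new StorageTransferServiceClient();

async function listReplicationJobs() {
  // JSON.stringify emits compact JSON, satisfying the "no spaces or line
  // breaks" requirement described in the proto comment above.
  const filter = JSON.stringify({
    projectId: 'my-project-id',
    dataBackend: 'QUERY_REPLICATION_CONFIGS',
    sourceBucket: 'source-bucket-name',
    sinkBucket: 'sink-bucket-name',
  });
  const [jobs] = await client.listTransferJobs({filter});
  for (const job of jobs) {
    console.log(job.name, job.replicationSpec);
  }
}

listReplicationJobs().catch(console.error);
```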
packages/google-storagetransfer/protos/google/storagetransfer/v1/transfer_types.proto
@@ -24,7 +24,6 @@ import "google/rpc/code.proto";
import "google/type/date.proto";
import "google/type/timeofday.proto";

option cc_enable_arenas = true;
option csharp_namespace = "Google.Cloud.StorageTransfer.V1";
option go_package = "cloud.google.com/go/storagetransfer/apiv1/storagetransferpb;storagetransferpb";
option java_outer_classname = "TransferTypes";
@@ -352,43 +351,43 @@ message AzureBlobStorageData {
string credentials_secret = 7 [(google.api.field_behavior) = OPTIONAL];
}

// An HttpData resource specifies a list of objects on the web to be transferred
// over HTTP. The information of the objects to be transferred is contained in
// a file referenced by a URL. The first line in the file must be
// `"TsvHttpData-1.0"`, which specifies the format of the file. Subsequent
// lines specify the information of the list of objects, one object per list
// entry. Each entry has the following tab-delimited fields:
// An HttpData resource specifies a list of objects on the web to be
// transferred over HTTP. The information of the objects to be transferred is
// contained in a file referenced by a URL. The first line in the file must be
// `"TsvHttpData-1.0"`, which specifies the format of the file. Subsequent
// lines specify the information of the list of objects, one object per list
// entry. Each entry has the following tab-delimited fields:
//
// * **HTTP URL** — The location of the object.
// * **HTTP URL** — The location of the object.
//
// * **Length** — The size of the object in bytes.
// * **Length** — The size of the object in bytes.
//
// * **MD5** — The base64-encoded MD5 hash of the object.
// * **MD5** — The base64-encoded MD5 hash of the object.
//
// For an example of a valid TSV file, see
// [Transferring data from
// URLs](https://cloud.google.com/storage-transfer/docs/create-url-list).
// For an example of a valid TSV file, see
// [Transferring data from
// URLs](https://cloud.google.com/storage-transfer/docs/create-url-list).
//
// When transferring data based on a URL list, keep the following in mind:
// When transferring data based on a URL list, keep the following in mind:
//
// * When an object located at `http(s)://hostname:port/<URL-path>` is
// transferred to a data sink, the name of the object at the data sink is
// transferred to a data sink, the name of the object at the data sink is
// `<hostname>/<URL-path>`.
//
// * If the specified size of an object does not match the actual size of the
// object fetched, the object is not transferred.
// object fetched, the object is not transferred.
//
// * If the specified MD5 does not match the MD5 computed from the transferred
// bytes, the object transfer fails.
// bytes, the object transfer fails.
//
// * Ensure that each URL you specify is publicly accessible. For
// example, in Cloud Storage you can
// [share an object publicly]
// (/storage/docs/cloud-console#_sharingdata) and get a link to it.
// example, in Cloud Storage you can
// [share an object publicly]
// (/storage/docs/cloud-console#_sharingdata) and get a link to it.
//
// * Storage Transfer Service obeys `robots.txt` rules and requires the source
// HTTP server to support `Range` requests and to return a `Content-Length`
// header in each response.
// HTTP server to support `Range` requests and to return a `Content-Length`
// header in each response.
//
// * [ObjectConditions][google.storagetransfer.v1.ObjectConditions] have no
// effect when filtering objects to transfer.
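
For illustration only (not part of this commit), a URL list in the `TsvHttpData-1.0` format described in the `HttpData` comment above might look like the following; the URLs, byte lengths, and base64 MD5 values are placeholders, and the three fields of each entry are separated by tab characters:

```
TsvHttpData-1.0
https://example.com/data/file1.bin	2048	CCnFkbZWcKgLbm2IWDEGbA==
https://example.com/data/file2.bin	4096	ZDf5vS0cdaLYfdbhM2rrSQ==
```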
@@ -686,6 +685,35 @@ message TransferSpec {
string sink_agent_pool_name = 18;
}

// Specifies the configuration for a cross-bucket replication job. Cross-bucket
// replication copies new or updated objects from a source Cloud Storage bucket
// to a destination Cloud Storage bucket. Existing objects in the source bucket
// are not copied by a new cross-bucket replication job.
message ReplicationSpec {
// The data source to be replicated.
oneof data_source {
// The Cloud Storage bucket from which to replicate objects.
GcsData gcs_data_source = 1;
}

// The destination for replicated objects.
oneof data_sink {
// The Cloud Storage bucket to which to replicate objects.
GcsData gcs_data_sink = 2;
}

// Object conditions that determine which objects are transferred. For
// replication jobs, only `include_prefixes` and `exclude_prefixes` are
// supported.
ObjectConditions object_conditions = 3;

// Specifies the metadata options to be applied during replication.
// Delete options are not supported. If a delete option is specified, the
// request fails with an [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]
// error.
TransferOptions transfer_options = 4;
}

// Specifies the metadata options for running a transfer.
message MetadataOptions {
// Whether symlinks should be skipped or preserved during a transfer job.
@@ -1033,6 +1061,9 @@ message TransferJob {
// Transfer specification.
TransferSpec transfer_spec = 4;

// Replication specification.
ReplicationSpec replication_spec = 17;

// Notification configuration.
NotificationConfig notification_config = 11;

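
To make the new `ReplicationSpec` concrete, here is a hedged sketch (not part of this commit; project and bucket names are placeholders) of creating a cross-bucket replication job through the Node.js client's `createTransferJob` method:

```ts
import {StorageTransferServiceClient} from '@google-cloud/storage-transfer';

const client = new StorageTransferServiceClient();

async function createReplicationJob() {
  const [job] = await client.createTransferJob({
    transferJob: {
      projectId: 'my-project-id',
      description: 'Replicate new and updated objects across buckets',
      status: 'ENABLED',
      replicationSpec: {
        gcsDataSource: {bucketName: 'source-bucket-name'},
        gcsDataSink: {bucketName: 'sink-bucket-name'},
        // Only include/exclude prefixes are supported for replication jobs.
        objectConditions: {includePrefixes: ['images/']},
        // Delete options are not supported here; specifying one fails with
        // INVALID_ARGUMENT, per the proto comment above.
        transferOptions: {overwriteObjectsAlreadyExistingInSink: true},
      },
    },
  });
  console.log(`Created replication job ${job.name}`);
}

createReplicationJob().catch(console.error);
```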
127 changes: 127 additions & 0 deletions packages/google-storagetransfer/protos/protos.d.ts

Some generated files are not rendered by default.

