From fe9de9c6fad6b98f1a53d7aea061c108b289efb9 Mon Sep 17 00:00:00 2001 From: Ajay Kannan Date: Sun, 17 Jan 2016 09:26:00 -0800 Subject: [PATCH 1/2] Merge ProjectInfo and Project --- .../examples/ResourceManagerExample.java | 11 +- .../gcloud/resourcemanager/Project.java | 419 ++++++++++++++++-- .../gcloud/resourcemanager/ProjectInfo.java | 353 --------------- .../resourcemanager/ResourceManager.java | 26 +- .../resourcemanager/ResourceManagerImpl.java | 37 +- .../gcloud/resourcemanager/package-info.java | 8 +- .../resourcemanager/ProjectInfoTest.java | 109 ----- .../gcloud/resourcemanager/ProjectTest.java | 186 ++++++-- .../ResourceManagerImplTest.java | 70 +-- .../resourcemanager/SerializationTest.java | 16 +- 10 files changed, 625 insertions(+), 610 deletions(-) delete mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java delete mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java index c1ba4e06cf7d..8aceeb832515 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java @@ -17,7 +17,7 @@ package com.google.gcloud.examples; import com.google.common.base.Joiner; -import com.google.gcloud.resourcemanager.ProjectInfo; +import com.google.gcloud.resourcemanager.Project; import com.google.gcloud.resourcemanager.ResourceManager; import com.google.gcloud.resourcemanager.ResourceManagerOptions; @@ -64,8 +64,7 @@ public void run(ResourceManager resourceManager, String... args) { labels.put(args[i], ""); } } - ProjectInfo project = - resourceManager.create(ProjectInfo.builder(projectId).labels(labels).build()); + Project project = Project.builder(resourceManager, projectId).labels(labels).build().create(); System.out.printf( "Successfully created project '%s': %s.%n", projectId, projectDetails(project)); } @@ -111,7 +110,7 @@ private static class GetAction implements ResourceManagerAction { @Override public void run(ResourceManager resourceManager, String... args) { String projectId = args[0]; - ProjectInfo project = resourceManager.get(projectId); + Project project = resourceManager.get(projectId); if (project != null) { System.out.printf( "Successfully got project '%s': %s.%n", projectId, projectDetails(project)); @@ -135,7 +134,7 @@ private static class ListAction implements ResourceManagerAction { @Override public void run(ResourceManager resourceManager, String... 
args) { System.out.println("Projects you can view:"); - for (ProjectInfo project : resourceManager.list().values()) { + for (Project project : resourceManager.list().values()) { System.out.println(projectDetails(project)); } } @@ -158,7 +157,7 @@ public String[] getOptionalParams() { ACTIONS.put("list", new ListAction()); } - private static String projectDetails(ProjectInfo project) { + private static String projectDetails(Project project) { return new StringBuilder() .append("{projectId:") .append(project.projectId()) diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java index e329e1aa3714..9c6b9465cbaf 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java @@ -16,8 +16,23 @@ package com.google.gcloud.resourcemanager; +import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.api.client.util.Data; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +import org.joda.time.DateTime; +import org.joda.time.format.ISODateTimeFormat; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + /** * A Google Cloud Resource Manager project object. * @@ -26,35 +41,295 @@ * immutable. Methods that change or update the underlying Project information return a new Project * instance. */ -public class Project { +public class Project implements Serializable { - private final ResourceManager resourceManager; - private final ProjectInfo info; + private static final long serialVersionUID = 6772622204012731332L; + + private final String name; + private final String projectId; + private final Map labels; + private final Long projectNumber; + private final State state; + private final Long createTimeMillis; + private final ResourceId parent; + private final ResourceManagerOptions options; + private transient ResourceManager resourceManager; /** - * Constructs a Project object that contains the ProjectInfo given. + * The project lifecycle states. */ - public Project(ResourceManager resourceManager, ProjectInfo projectInfo) { - this.resourceManager = checkNotNull(resourceManager); - this.info = checkNotNull(projectInfo); + public enum State { + /** + * Only used/useful for distinguishing unset values. + */ + LIFECYCLE_STATE_UNSPECIFIED, + + /** + * The normal and active state. + */ + ACTIVE, + + /** + * The project has been marked for deletion by the user or by the system (Google Cloud + * Platform). This can generally be reversed by calling {@link ResourceManager#undelete}. + */ + DELETE_REQUESTED, + + /** + * The process of deleting the project has begun. Reversing the deletion is no longer possible. 
+ */ + DELETE_IN_PROGRESS + } + + static class ResourceId implements Serializable { + + private static final long serialVersionUID = 214964928747889072L; + + private final String id; + private final String type; + + ResourceId(String id, String type) { + this.id = checkNotNull(id); + this.type = checkNotNull(type); + } + + String id() { + return id; + } + + String type() { + return type; + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ResourceId && Objects.equals(toPb(), ((ResourceId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(id, type); + } + + com.google.api.services.cloudresourcemanager.model.ResourceId toPb() { + com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb = + new com.google.api.services.cloudresourcemanager.model.ResourceId(); + resourceIdPb.setId(id); + resourceIdPb.setType(type.toLowerCase()); + return resourceIdPb; + } + + static ResourceId fromPb( + com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb) { + return new ResourceId(resourceIdPb.getId(), resourceIdPb.getType()); + } + } + + public static class Builder { + + private String name; + private String projectId; + private Map labels = new HashMap<>(); + private Long projectNumber; + private State state; + private Long createTimeMillis; + private ResourceId parent; + private ResourceManager resourceManager; + + private Builder() { + } + + Builder(Project project) { + this.name = project.name; + this.projectId = project.projectId; + this.labels.putAll(project.labels); + this.projectNumber = project.projectNumber; + this.state = project.state; + this.createTimeMillis = project.createTimeMillis; + this.parent = project.parent; + this.resourceManager = project.resourceManager; + } + + /** + * Set the user-assigned name of the project. + * + *

This field is optional and can remain unset. Allowed characters are: lowercase and + * uppercase letters, numbers, hyphen, single-quote, double-quote, space, and exclamation point. + * This field can be changed after project creation. + */ + public Builder name(String name) { + this.name = firstNonNull(name, Data.nullOf(String.class)); + return this; + } + + /** + * Set the unique, user-assigned ID of the project. + * + *

The ID must be 6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. + * Trailing hyphens are prohibited. This field cannot be changed after the server creates the + * project. + */ + public Builder projectId(String projectId) { + this.projectId = checkNotNull(projectId); + return this; + } + + /** + * Add a label associated with this project. + * + *

See {@link #labels} for label restrictions. + */ + public Builder addLabel(String key, String value) { + this.labels.put(key, value); + return this; + } + + /** + * Remove a label associated with this project. + */ + public Builder removeLabel(String key) { + this.labels.remove(key); + return this; + } + + /** + * Clear the labels associated with this project. + */ + public Builder clearLabels() { + this.labels.clear(); + return this; + } + + /** + * Set the labels associated with this project. + * + *

Label keys must be between 1 and 63 characters long and must conform to the following + * regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. Label values must be between 0 and 63 + * characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?. No + * more than 256 labels can be associated with a given resource. This field can be changed after + * project creation. + */ + public Builder labels(Map labels) { + this.labels = Maps.newHashMap(checkNotNull(labels)); + return this; + } + + Builder projectNumber(Long projectNumber) { + this.projectNumber = projectNumber; + return this; + } + + Builder state(State state) { + this.state = state; + return this; + } + + Builder createTimeMillis(Long createTimeMillis) { + this.createTimeMillis = createTimeMillis; + return this; + } + + Builder parent(ResourceId parent) { + this.parent = parent; + return this; + } + + /** + * Sets the service object associated with this Project. This service object is used to send + * requests relevant to this blob (e.g. {@link #delete}, {@link #get}, {@link #reload}, + * {@link #replace}, {@link #undelete}). + */ + public Builder resourceManager(ResourceManager resourceManager) { + this.resourceManager = checkNotNull(resourceManager); + return this; + } + + public Project build() { + return new Project(this); + } + } + + Project(Builder builder) { + this.name = builder.name; + this.projectId = builder.projectId; + this.labels = ImmutableMap.copyOf(builder.labels); + this.projectNumber = builder.projectNumber; + this.state = builder.state; + this.createTimeMillis = builder.createTimeMillis; + this.parent = builder.parent; + this.options = builder.resourceManager.options(); + this.resourceManager = builder.resourceManager; + } + + Project(String name, String projectId, Map labels, Long projectNumber, + State state, Long createTimeMillis, ResourceId parent, ResourceManagerOptions options, + ResourceManager resourceManager) { + this.name = name; + this.projectId = projectId; + this.labels = ImmutableMap.copyOf(labels); + this.projectNumber = projectNumber; + this.state = state; + this.createTimeMillis = createTimeMillis; + this.parent = parent; + this.options = options; + this.resourceManager = resourceManager; } /** - * Constructs a Project object that contains project information got from the server. + * Get the unique, user-assigned ID of the project. * - * @return Project object containing the project's metadata or {@code null} if not found - * @throws ResourceManagerException upon failure + *

This field cannot be changed after the server creates the project. */ - public static Project get(ResourceManager resourceManager, String projectId) { - ProjectInfo projectInfo = resourceManager.get(projectId); - return projectInfo != null ? new Project(resourceManager, projectInfo) : null; + public String projectId() { + return projectId; + } + + /** + * Get the user-assigned name of the project. + * + *

This field is optional, can remain unset, and can be changed after project creation. */ + public String name() { + return Data.isNull(name) ? null : name; + } + + /** + * Get the number uniquely identifying the project. + * + *

This field is set by the server and is read-only. + */ + public Long projectNumber() { + return projectNumber; } /** - * Returns the {@link ProjectInfo} object associated with this Project. + * Get the immutable map of labels associated with this project. */ - public ProjectInfo info() { - return info; + public Map labels() { + return labels; + } + + /** + * Get the project's lifecycle state. + * + *

This is a read-only field. To change the lifecycle state of your project, use the + * {@code delete} or {@code undelete} method. + */ + public State state() { + return state; + } + + ResourceId parent() { + return parent; + } + + /** + * Get the project's creation time (in milliseconds). + * + *

This field is set by the server and is read-only. + */ + public Long createTimeMillis() { + return createTimeMillis; } /** @@ -64,6 +339,46 @@ public ResourceManager resourceManager() { return resourceManager; } + @Override + public boolean equals(Object obj) { + return obj instanceof Project && Objects.equals(toPb(), ((Project) obj).toPb()) + && Objects.equals(options, ((Project) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash( + name, projectId, labels, projectNumber, state, createTimeMillis, parent, options); + } + + public static Builder builder(ResourceManager resourceManager, String id) { + return new Builder().projectId(id).resourceManager(resourceManager); + } + + public Builder toBuilder() { + return new Builder(this); + } + + /** + * Creates a project. + * + * @return a Project containing the project's data + * @throws ResourceManagerException upon failure + */ + public Project create() { + return resourceManager.create(this); + } + + /** + * Constructs a Project object that contains project information loaded from the server. + * + * @return Project object containing the project's data or {@code null} if not found + * @throws ResourceManagerException upon failure + */ + public static Project get(ResourceManager resourceManager, String projectId) { + return resourceManager.get(projectId); + } + /** * Fetches the current project's latest information. Returns {@code null} if the job does not * exist. @@ -72,7 +387,7 @@ public ResourceManager resourceManager() { * @throws ResourceManagerException upon failure */ public Project reload() { - return Project.get(resourceManager, info.projectId()); + return resourceManager.get(projectId); } /** @@ -81,11 +396,11 @@ public Project reload() { *

This method will only affect the project if the following criteria are met:
 * <ul>
 * <li>The project does not have a billing account associated with it.
- * <li>The project has a lifecycle state of {@link ProjectInfo.State#ACTIVE}.
+ * <li>The project has a lifecycle state of {@link Project.State#ACTIVE}.
 * </ul>
- * This method changes the project's lifecycle state from {@link ProjectInfo.State#ACTIVE} to - * {@link ProjectInfo.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at - * which point the lifecycle state changes to {@link ProjectInfo.State#DELETE_IN_PROGRESS}. Until + * This method changes the project's lifecycle state from {@link Project.State#ACTIVE} to + * {@link Project.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at + * which point the lifecycle state changes to {@link Project.State#DELETE_IN_PROGRESS}. Until * the deletion completes, you can check the lifecycle state checked by retrieving the project * with {@link ResourceManager#get}, and the project remains visible to * {@link ResourceManager#list}. However, you cannot update the project. After the deletion @@ -98,15 +413,15 @@ public Project reload() { * @throws ResourceManagerException upon failure */ public void delete() { - resourceManager.delete(info.projectId()); + resourceManager.delete(projectId); } /** * Restores the project identified by the specified project ID. * *

You can only use this method for a project that has a lifecycle state of - * {@link ProjectInfo.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle - * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The + * {@link Project.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle + * state of {@link Project.State#DELETE_IN_PROGRESS}, the project cannot be restored. The * caller must have modify permissions for this project. * * @see Cloud * Resource Manager update - * @return the ProjectInfo representing the new project metadata + * @return the Project representing the new project metadata * @throws ResourceManagerException upon failure */ - public Project replace(ProjectInfo projectInfo) { - return new Project(resourceManager, resourceManager.replace(checkNotNull(projectInfo))); + public Project replace(Project project) { + return resourceManager.replace(checkNotNull(project)); + } + + com.google.api.services.cloudresourcemanager.model.Project toPb() { + com.google.api.services.cloudresourcemanager.model.Project projectPb = + new com.google.api.services.cloudresourcemanager.model.Project(); + projectPb.setName(name); + projectPb.setProjectId(projectId); + projectPb.setLabels(labels); + projectPb.setProjectNumber(projectNumber); + if (state != null) { + projectPb.setLifecycleState(state.toString()); + } + if (createTimeMillis != null) { + projectPb.setCreateTime(ISODateTimeFormat.dateTime().withZoneUTC().print(createTimeMillis)); + } + if (parent != null) { + projectPb.setParent(parent.toPb()); + } + return projectPb; + } + + static Project fromPb( + ResourceManager resourceManager, + com.google.api.services.cloudresourcemanager.model.Project projectPb) { + Builder builder = + builder(resourceManager, projectPb.getProjectId()) + .projectNumber(projectPb.getProjectNumber()); + if (projectPb.getName() != null && !projectPb.getName().equals("Unnamed")) { + builder.name(projectPb.getName()); + } + if (projectPb.getLabels() != null) { + builder.labels(projectPb.getLabels()); + } + if (projectPb.getLifecycleState() != null) { + builder.state(State.valueOf(projectPb.getLifecycleState())); + } + if (projectPb.getCreateTime() != null) { + builder.createTimeMillis(DateTime.parse(projectPb.getCreateTime()).getMillis()); + } + if (projectPb.getParent() != null) { + builder.parent(ResourceId.fromPb(projectPb.getParent())); + } + return builder.build(); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.resourceManager = options.service(); } } diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java deleted file mode 100644 index 2cb8a2d93ad2..000000000000 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java +++ /dev/null @@ -1,353 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. 
See the License for the specific language governing permissions and limitations under - * the License. - */ - -package com.google.gcloud.resourcemanager; - -import static com.google.common.base.MoreObjects.firstNonNull; -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.api.client.util.Data; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; - -import org.joda.time.DateTime; -import org.joda.time.format.ISODateTimeFormat; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * A Google Cloud Resource Manager project metadata object. - * A Project is a high-level Google Cloud Platform entity. It is a container for ACLs, APIs, - * AppEngine Apps, VMs, and other Google Cloud Platform resources. - */ -public class ProjectInfo implements Serializable { - - private static final long serialVersionUID = 9148970963697734236L; - private final String name; - private final String projectId; - private final Map labels; - private final Long projectNumber; - private final State state; - private final Long createTimeMillis; - private final ResourceId parent; - - /** - * The project lifecycle states. - */ - public enum State { - /** - * Only used/useful for distinguishing unset values. - */ - LIFECYCLE_STATE_UNSPECIFIED, - - /** - * The normal and active state. - */ - ACTIVE, - - /** - * The project has been marked for deletion by the user or by the system (Google Cloud - * Platform). This can generally be reversed by calling {@link ResourceManager#undelete}. - */ - DELETE_REQUESTED, - - /** - * The process of deleting the project has begun. Reversing the deletion is no longer possible. - */ - DELETE_IN_PROGRESS - } - - static class ResourceId implements Serializable { - - private static final long serialVersionUID = -325199985993344726L; - - private final String id; - private final String type; - - ResourceId(String id, String type) { - this.id = checkNotNull(id); - this.type = checkNotNull(type); - } - - String id() { - return id; - } - - String type() { - return type; - } - - @Override - public boolean equals(Object obj) { - return obj instanceof ResourceId && Objects.equals(toPb(), ((ResourceId) obj).toPb()); - } - - @Override - public int hashCode() { - return Objects.hash(id, type); - } - - com.google.api.services.cloudresourcemanager.model.ResourceId toPb() { - com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb = - new com.google.api.services.cloudresourcemanager.model.ResourceId(); - resourceIdPb.setId(id); - resourceIdPb.setType(type.toLowerCase()); - return resourceIdPb; - } - - static ResourceId fromPb( - com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb) { - return new ResourceId(resourceIdPb.getId(), resourceIdPb.getType()); - } - } - - public static class Builder { - - private String name; - private String projectId; - private Map labels = new HashMap<>(); - private Long projectNumber; - private State state; - private Long createTimeMillis; - private ResourceId parent; - - private Builder() { - } - - Builder(ProjectInfo info) { - this.name = info.name; - this.projectId = info.projectId; - this.labels.putAll(info.labels); - this.projectNumber = info.projectNumber; - this.state = info.state; - this.createTimeMillis = info.createTimeMillis; - this.parent = info.parent; - } - - /** - * Set the user-assigned name of the project. - * - *

This field is optional and can remain unset. Allowed characters are: lowercase and - * uppercase letters, numbers, hyphen, single-quote, double-quote, space, and exclamation point. - * This field can be changed after project creation. - */ - public Builder name(String name) { - this.name = firstNonNull(name, Data.nullOf(String.class)); - return this; - } - - /** - * Set the unique, user-assigned ID of the project. - * - *

The ID must be 6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. - * Trailing hyphens are prohibited. This field cannot be changed after the server creates the - * project. - */ - public Builder projectId(String projectId) { - this.projectId = checkNotNull(projectId); - return this; - } - - /** - * Add a label associated with this project. - * - *

See {@link #labels} for label restrictions. - */ - public Builder addLabel(String key, String value) { - this.labels.put(key, value); - return this; - } - - /** - * Remove a label associated with this project. - */ - public Builder removeLabel(String key) { - this.labels.remove(key); - return this; - } - - /** - * Clear the labels associated with this project. - */ - public Builder clearLabels() { - this.labels.clear(); - return this; - } - - /** - * Set the labels associated with this project. - * - *

Label keys must be between 1 and 63 characters long and must conform to the following - * regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. Label values must be between 0 and 63 - * characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?. No - * more than 256 labels can be associated with a given resource. This field can be changed after - * project creation. - */ - public Builder labels(Map labels) { - this.labels = Maps.newHashMap(checkNotNull(labels)); - return this; - } - - Builder projectNumber(Long projectNumber) { - this.projectNumber = projectNumber; - return this; - } - - Builder state(State state) { - this.state = state; - return this; - } - - Builder createTimeMillis(Long createTimeMillis) { - this.createTimeMillis = createTimeMillis; - return this; - } - - Builder parent(ResourceId parent) { - this.parent = parent; - return this; - } - - public ProjectInfo build() { - return new ProjectInfo(this); - } - } - - ProjectInfo(Builder builder) { - this.name = builder.name; - this.projectId = builder.projectId; - this.labels = ImmutableMap.copyOf(builder.labels); - this.projectNumber = builder.projectNumber; - this.state = builder.state; - this.createTimeMillis = builder.createTimeMillis; - this.parent = builder.parent; - } - - /** - * Get the unique, user-assigned ID of the project. - * - *

This field cannot be changed after the server creates the project. - */ - public String projectId() { - return projectId; - } - - /** - * Get the user-assigned name of the project. - * - *

This field is optional, can remain unset, and can be changed after project creation. - */ - public String name() { - return Data.isNull(name) ? null : name; - } - - /** - * Get number uniquely identifying the project. - * - *

This field is set by the server and is read-only. - */ - public Long projectNumber() { - return projectNumber; - } - - /** - * Get the immutable map of labels associated with this project. - */ - public Map labels() { - return labels; - } - - /** - * Get the project's lifecycle state. - * - *

This is a read-only field. To change the lifecycle state of your project, use the - * {@code delete} or {@code undelete} method. - */ - public State state() { - return state; - } - - ResourceId parent() { - return parent; - } - - /** - * Get the project's creation time (in milliseconds). - * - *

This field is set by the server and is read-only. - */ - public Long createTimeMillis() { - return createTimeMillis; - } - - @Override - public boolean equals(Object obj) { - return obj instanceof ProjectInfo && Objects.equals(toPb(), ((ProjectInfo) obj).toPb()); - } - - @Override - public int hashCode() { - return Objects.hash(name, projectId, labels, projectNumber, state, createTimeMillis, parent); - } - - public static Builder builder(String id) { - return new Builder().projectId(id); - } - - public Builder toBuilder() { - return new Builder(this); - } - - com.google.api.services.cloudresourcemanager.model.Project toPb() { - com.google.api.services.cloudresourcemanager.model.Project projectPb = - new com.google.api.services.cloudresourcemanager.model.Project(); - projectPb.setName(name); - projectPb.setProjectId(projectId); - projectPb.setLabels(labels); - projectPb.setProjectNumber(projectNumber); - if (state != null) { - projectPb.setLifecycleState(state.toString()); - } - if (createTimeMillis != null) { - projectPb.setCreateTime(ISODateTimeFormat.dateTime().withZoneUTC().print(createTimeMillis)); - } - if (parent != null) { - projectPb.setParent(parent.toPb()); - } - return projectPb; - } - - static ProjectInfo fromPb(com.google.api.services.cloudresourcemanager.model.Project projectPb) { - Builder builder = builder(projectPb.getProjectId()).projectNumber(projectPb.getProjectNumber()); - if (projectPb.getName() != null && !projectPb.getName().equals("Unnamed")) { - builder.name(projectPb.getName()); - } - if (projectPb.getLabels() != null) { - builder.labels(projectPb.getLabels()); - } - if (projectPb.getLifecycleState() != null) { - builder.state(State.valueOf(projectPb.getLifecycleState())); - } - if (projectPb.getCreateTime() != null) { - builder.createTimeMillis(DateTime.parse(projectPb.getCreateTime()).getMillis()); - } - if (projectPb.getParent() != null) { - builder.parent(ResourceId.fromPb(projectPb.getParent())); - } - return builder.build(); - } -} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java index 3d658d18d28a..5716b891ec89 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java @@ -179,12 +179,12 @@ public static ProjectListOption fields(ProjectField... fields) { * @see Cloud * Resource Manager create - * @return ProjectInfo object representing the new project's metadata. The returned object will + * @return Project object representing the new project's metadata. The returned object will * include the following read-only fields supplied by the server: project number, lifecycle * state, and creation time. * @throws ResourceManagerException upon failure */ - ProjectInfo create(ProjectInfo project); + Project create(Project project); /** * Marks the project identified by the specified project ID for deletion. @@ -192,11 +192,11 @@ public static ProjectListOption fields(ProjectField... fields) { *

This method will only affect the project if the following criteria are met:
 * <ul>
 * <li>The project does not have a billing account associated with it.
- * <li>The project has a lifecycle state of {@link ProjectInfo.State#ACTIVE}.
+ * <li>The project has a lifecycle state of {@link Project.State#ACTIVE}.
 * </ul>
- * This method changes the project's lifecycle state from {@link ProjectInfo.State#ACTIVE} to - * {@link ProjectInfo.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at - * which point the lifecycle state changes to {@link ProjectInfo.State#DELETE_IN_PROGRESS}. Until + * This method changes the project's lifecycle state from {@link Project.State#ACTIVE} to + * {@link Project.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at + * which point the lifecycle state changes to {@link Project.State#DELETE_IN_PROGRESS}. Until * the deletion completes, you can check the lifecycle state checked by retrieving the project * with {@link ResourceManager#get}, and the project remains visible to * {@link ResourceManager#list}. However, you cannot update the project. After the deletion @@ -221,7 +221,7 @@ public static ProjectListOption fields(ProjectField... fields) { * Resource Manager get * @throws ResourceManagerException upon failure */ - ProjectInfo get(String projectId, ProjectGetOption... options); + Project get(String projectId, ProjectGetOption... options); /** * Lists the projects visible to the current user. @@ -234,10 +234,10 @@ public static ProjectListOption fields(ProjectField... fields) { * @see Cloud * Resource Manager list - * @return {@code Page}, a page of projects + * @return {@code Page}, a page of projects * @throws ResourceManagerException upon failure */ - Page list(ProjectListOption... options); + Page list(ProjectListOption... options); /** * Replaces the attributes of the project. @@ -247,17 +247,17 @@ public static ProjectListOption fields(ProjectField... fields) { * @see Cloud * Resource Manager update - * @return the ProjectInfo representing the new project metadata + * @return the Project representing the new project metadata * @throws ResourceManagerException upon failure */ - ProjectInfo replace(ProjectInfo newProject); + Project replace(Project newProject); /** * Restores the project identified by the specified project ID. * *

You can only use this method for a project that has a lifecycle state of - * {@link ProjectInfo.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle - * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The + * {@link Project.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle + * state of {@link Project.State#DELETE_IN_PROGRESS}, the project cannot be restored. The * caller must have modify permissions for this project. * * @see () { @Override public com.google.api.services.cloudresourcemanager.model.Project call() { - return resourceManagerRpc.create(project.toPb()); + return resourceManagerRpc.create(project.toPb()); } }, options().retryParams(), EXCEPTION_HANDLER)); } catch (RetryHelperException e) { @@ -101,7 +103,7 @@ public Void call() { } @Override - public ProjectInfo get(final String projectId, ProjectGetOption... options) { + public Project get(final String projectId, ProjectGetOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.cloudresourcemanager.model.Project answer = runWithRetries( @@ -111,13 +113,13 @@ public com.google.api.services.cloudresourcemanager.model.Project call() { return resourceManagerRpc.get(projectId, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); - return answer == null ? null : ProjectInfo.fromPb(answer); + return answer == null ? null : Project.fromPb(this, answer); } catch (RetryHelperException e) { throw ResourceManagerException.translateAndThrow(e); } } - private static class ProjectPageFetcher implements NextPageFetcher { + private static class ProjectPageFetcher implements NextPageFetcher { private static final long serialVersionUID = 2158209410430566961L; private final Map requestOptions; @@ -131,17 +133,18 @@ private static class ProjectPageFetcher implements NextPageFetcher } @Override - public Page nextPage() { + public Page nextPage() { return listProjects(serviceOptions, requestOptions); } } @Override - public Page list(ProjectListOption... options) { + public Page list(ProjectListOption... options) { return listProjects(options(), optionMap(options)); } - private static Page listProjects(final ResourceManagerOptions serviceOptions, + private static Page listProjects( + final ResourceManagerOptions serviceOptions, final Map optionsMap) { try { Tuple> result = @@ -155,16 +158,16 @@ Iterable> call() { }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable projects = + Iterable projects = result.y() == null - ? ImmutableList.of() : Iterables.transform( + ? 
ImmutableList.of() : Iterables.transform( result.y(), new Function() { + Project>() { @Override - public ProjectInfo apply( + public Project apply( com.google.api.services.cloudresourcemanager.model.Project projectPb) { - return ProjectInfo.fromPb(projectPb); + return Project.fromPb(serviceOptions.service(), projectPb); } }); return new PageImpl<>( @@ -175,9 +178,11 @@ public ProjectInfo apply( } @Override - public ProjectInfo replace(final ProjectInfo newProject) { + public Project replace(final Project newProject) { try { - return ProjectInfo.fromPb(runWithRetries( + return Project.fromPb( + this, + runWithRetries( new Callable() { @Override public com.google.api.services.cloudresourcemanager.model.Project call() { diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java index b8687fbf1314..78c7e2402293 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java @@ -21,17 +21,17 @@ *

<pre> {@code
  * ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
  * String myProjectId = "my-globally-unique-project-id"; // Change to a unique project ID.
- * ProjectInfo myProject = resourceManager.create(ProjectInfo.builder(myProjectId).build());
- * ProjectInfo newProjectInfo = resourceManager.replace(myProject.toBuilder()
+ * Project myProject = Project.builder(resourceManager, myProjectId).build().create();
+ * Project newProject = myProject.replace(myProject.toBuilder()
  *     .addLabel("launch-status", "in-development").build());
- * Iterator<ProjectInfo> projectIterator = resourceManager.list().iterateAll();
+ * Iterator<Project> projectIterator = resourceManager.list().iterateAll();
  * System.out.println("Projects I can view:");
  * while (projectIterator.hasNext()) {
  *   System.out.println(projectIterator.next().projectId());
  * }}</pre>
* *

Remember that you must authenticate using the Google Cloud SDK. See more about - * providing + * providing * credentials here. * * @see Google Cloud Resource Manager diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java deleted file mode 100644 index 3aaef8047322..000000000000 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.resourcemanager; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; - -import com.google.common.collect.ImmutableMap; - -import org.junit.Test; - -import java.util.Map; - -public class ProjectInfoTest { - - private static final String PROJECT_ID = "project-id"; - private static final String NAME = "myProj"; - private static final Map LABELS = ImmutableMap.of("k1", "v1", "k2", "v2"); - private static final Long PROJECT_NUMBER = 123L; - private static final Long CREATE_TIME_MILLIS = 123456789L; - private static final ProjectInfo.State STATE = ProjectInfo.State.DELETE_REQUESTED; - private static final ProjectInfo.ResourceId PARENT = - new ProjectInfo.ResourceId("id", "organization"); - private static final ProjectInfo FULL_PROJECT_INFO = ProjectInfo.builder(PROJECT_ID) - .name(NAME) - .labels(LABELS) - .projectNumber(PROJECT_NUMBER) - .createTimeMillis(CREATE_TIME_MILLIS) - .state(STATE) - .parent(PARENT) - .build(); - private static final ProjectInfo PARTIAL_PROJECT_INFO = ProjectInfo.builder(PROJECT_ID).build(); - private static final ProjectInfo UNNAMED_PROJECT_FROM_LIST = - PARTIAL_PROJECT_INFO.toBuilder().name("Unnamed").build(); - - @Test - public void testBuilder() { - assertEquals(PROJECT_ID, FULL_PROJECT_INFO.projectId()); - assertEquals(NAME, FULL_PROJECT_INFO.name()); - assertEquals(LABELS, FULL_PROJECT_INFO.labels()); - assertEquals(PROJECT_NUMBER, FULL_PROJECT_INFO.projectNumber()); - assertEquals(CREATE_TIME_MILLIS, FULL_PROJECT_INFO.createTimeMillis()); - assertEquals(STATE, FULL_PROJECT_INFO.state()); - - assertEquals(PROJECT_ID, PARTIAL_PROJECT_INFO.projectId()); - assertEquals(null, PARTIAL_PROJECT_INFO.name()); - assertTrue(PARTIAL_PROJECT_INFO.labels().isEmpty()); - assertEquals(null, PARTIAL_PROJECT_INFO.projectNumber()); - assertEquals(null, PARTIAL_PROJECT_INFO.createTimeMillis()); - assertEquals(null, PARTIAL_PROJECT_INFO.state()); - } - - @Test - public void testToBuilder() { - compareProjects(FULL_PROJECT_INFO, FULL_PROJECT_INFO.toBuilder().build()); - compareProjects(PARTIAL_PROJECT_INFO, PARTIAL_PROJECT_INFO.toBuilder().build()); - } - - @Test - public void testToAndFromPb() { - assertTrue(FULL_PROJECT_INFO.toPb().getCreateTime().endsWith("Z")); - 
compareProjects(FULL_PROJECT_INFO, ProjectInfo.fromPb(FULL_PROJECT_INFO.toPb())); - compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.fromPb(PARTIAL_PROJECT_INFO.toPb())); - compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.fromPb(UNNAMED_PROJECT_FROM_LIST.toPb())); - } - - @Test - public void testEquals() { - compareProjects( - FULL_PROJECT_INFO, - ProjectInfo.builder(PROJECT_ID) - .name(NAME) - .labels(LABELS) - .projectNumber(PROJECT_NUMBER) - .createTimeMillis(CREATE_TIME_MILLIS) - .state(STATE) - .parent(PARENT) - .build()); - compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.builder(PROJECT_ID).build()); - assertNotEquals(FULL_PROJECT_INFO, PARTIAL_PROJECT_INFO); - } - - private void compareProjects(ProjectInfo expected, ProjectInfo value) { - assertEquals(expected, value); - assertEquals(expected.projectId(), value.projectId()); - assertEquals(expected.name(), value.name()); - assertEquals(expected.labels(), value.labels()); - assertEquals(expected.projectNumber(), value.projectNumber()); - assertEquals(expected.createTimeMillis(), value.createTimeMillis()); - assertEquals(expected.state(), value.state()); - assertEquals(expected.parent(), value.parent()); - } -} - diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java index 077818bf2bb9..7bccde5a5bf5 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java @@ -16,14 +16,16 @@ package com.google.gcloud.resourcemanager; -import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; import com.google.common.collect.ImmutableMap; @@ -39,22 +41,34 @@ public class ProjectTest { private static final Map LABELS = ImmutableMap.of("k1", "v1", "k2", "v2"); private static final Long PROJECT_NUMBER = 123L; private static final Long CREATE_TIME_MILLIS = 123456789L; - private static final ProjectInfo.State STATE = ProjectInfo.State.DELETE_REQUESTED; - private static final ProjectInfo PROJECT_INFO = ProjectInfo.builder(PROJECT_ID) - .name(NAME) - .labels(LABELS) - .projectNumber(PROJECT_NUMBER) - .createTimeMillis(CREATE_TIME_MILLIS) - .state(STATE) - .build(); - + private static final Project.State STATE = Project.State.DELETE_REQUESTED; + private static final Project.ResourceId PARENT = new Project.ResourceId("id", "organization"); private ResourceManager resourceManager; - private Project project; + private Project fullProject; + private Project fullProjectFromConstructor; + private Project partialProject; + private Project unnamedProjectFromList; + + private void initializeProjects() { + fullProject = + Project.builder(resourceManager, PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build(); + partialProject = Project.builder(resourceManager, PROJECT_ID).build(); + unnamedProjectFromList = 
partialProject.toBuilder().name("Unnamed").build(); + } @Before - public void setUp() throws Exception { - resourceManager = createStrictMock(ResourceManager.class); - project = new Project(resourceManager, PROJECT_INFO); + public void setup() { + resourceManager = createMock(ResourceManager.class); + fullProjectFromConstructor = new Project(NAME, PROJECT_ID, LABELS, PROJECT_NUMBER, STATE, + CREATE_TIME_MILLIS, PARENT, null, resourceManager); + expect(resourceManager.options()).andReturn(null).anyTimes(); } @After @@ -63,74 +77,160 @@ public void tearDown() throws Exception { } @Test - public void testLoad() { - expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(PROJECT_INFO); + public void testBuilder() { + replay(resourceManager); + initializeProjects(); + assertEquals(PROJECT_ID, fullProject.projectId()); + assertEquals(NAME, fullProject.name()); + assertEquals(LABELS, fullProject.labels()); + assertEquals(PROJECT_NUMBER, fullProject.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, fullProject.createTimeMillis()); + assertEquals(STATE, fullProject.state()); + assertEquals(PARENT, fullProject.parent()); + assertEquals(resourceManager.options(), fullProject.resourceManager().options()); + + assertEquals(PROJECT_ID, partialProject.projectId()); + assertEquals(null, partialProject.name()); + assertTrue(partialProject.labels().isEmpty()); + assertEquals(null, partialProject.projectNumber()); + assertEquals(null, partialProject.createTimeMillis()); + assertEquals(null, partialProject.state()); + assertEquals(null, partialProject.parent()); + assertEquals(resourceManager.options(), partialProject.resourceManager().options()); + } + + @Test + public void testToBuilder() { replay(resourceManager); - Project loadedProject = Project.get(resourceManager, PROJECT_INFO.projectId()); - assertEquals(PROJECT_INFO, loadedProject.info()); + initializeProjects(); + compareProjects(fullProject, fullProject.toBuilder().build()); + compareProjects(partialProject, partialProject.toBuilder().build()); + } + + @Test + public void testToAndFromPb() { + replay(resourceManager); + initializeProjects(); + assertTrue(fullProject.toPb().getCreateTime().endsWith("Z")); + compareProjects(fullProject, Project.fromPb(resourceManager, fullProject.toPb())); + compareProjects(partialProject, Project.fromPb(resourceManager, partialProject.toPb())); + compareProjects(partialProject, Project.fromPb(resourceManager, unnamedProjectFromList.toPb())); + } + + @Test + public void testEquals() { + replay(resourceManager); + initializeProjects(); + compareProjects( + fullProject, + Project.builder(resourceManager, PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build()); + compareProjects(partialProject, Project.builder(resourceManager, PROJECT_ID).build()); + assertNotEquals(fullProject, partialProject); + } + + @Test + public void testCreate() { + expect(resourceManager.get(PROJECT_ID)).andReturn(fullProjectFromConstructor); + replay(resourceManager); + initializeProjects(); + Project loadedProject = Project.get(resourceManager, fullProject.projectId()); + assertEquals(fullProject, loadedProject); + } + + @Test + public void testGet() { + expect(resourceManager.get(PROJECT_ID)).andReturn(fullProjectFromConstructor); + replay(resourceManager); + initializeProjects(); + Project loadedProject = Project.get(resourceManager, fullProject.projectId()); + assertEquals(fullProject, loadedProject); } @Test public void 
testReload() { - ProjectInfo newInfo = PROJECT_INFO.toBuilder().addLabel("k3", "v3").build(); - expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(newInfo); + Map newLabels = ImmutableMap.of("k1", "v1", "k2", "v2", "k3", "v3"); + Project project = new Project(NAME, PROJECT_ID, newLabels, PROJECT_NUMBER, STATE, + CREATE_TIME_MILLIS, PARENT, null, resourceManager); + expect(resourceManager.get(PROJECT_ID)).andReturn(project); replay(resourceManager); + initializeProjects(); Project newProject = project.reload(); assertSame(resourceManager, newProject.resourceManager()); - assertEquals(newInfo, newProject.info()); + assertEquals(project, newProject); } @Test - public void testLoadNull() { - expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(null); + public void testGetNull() { + expect(resourceManager.get(PROJECT_ID)).andReturn(null); replay(resourceManager); - assertNull(Project.get(resourceManager, PROJECT_INFO.projectId())); + initializeProjects(); + assertNull(Project.get(resourceManager, fullProject.projectId())); } @Test public void testReloadDeletedProject() { - expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(PROJECT_INFO); - expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(null); + expect(resourceManager.get(PROJECT_ID)).andReturn(fullProjectFromConstructor); + expect(resourceManager.get(PROJECT_ID)).andReturn(null); replay(resourceManager); - Project loadedProject = Project.get(resourceManager, PROJECT_INFO.projectId()); + initializeProjects(); + Project loadedProject = Project.get(resourceManager, fullProject.projectId()); assertNotNull(loadedProject); Project reloadedProject = loadedProject.reload(); assertNull(reloadedProject); } - @Test - public void testInfo() { - replay(resourceManager); - assertEquals(PROJECT_INFO, project.info()); - } - @Test public void testResourceManager() { replay(resourceManager); - assertEquals(resourceManager, project.resourceManager()); + initializeProjects(); + assertEquals(resourceManager, fullProject.resourceManager()); } @Test public void testDelete() { - resourceManager.delete(PROJECT_INFO.projectId()); + resourceManager.delete(PROJECT_ID); replay(resourceManager); - project.delete(); + initializeProjects(); + fullProject.delete(); } @Test public void testUndelete() { - resourceManager.undelete(PROJECT_INFO.projectId()); + resourceManager.undelete(PROJECT_ID); replay(resourceManager); - project.undelete(); + initializeProjects(); + fullProject.undelete(); } @Test public void testReplace() { - ProjectInfo newInfo = PROJECT_INFO.toBuilder().addLabel("k3", "v3").build(); - expect(resourceManager.replace(newInfo)).andReturn(newInfo); + Map newLabels = ImmutableMap.of("k1", "v1", "k2", "v2", "k3", "v3"); + Project expected = new Project(NAME, PROJECT_ID, newLabels, PROJECT_NUMBER, STATE, + CREATE_TIME_MILLIS, PARENT, null, resourceManager); + expect(resourceManager.replace(expected)).andReturn(expected); replay(resourceManager); - Project newProject = project.replace(newInfo); - assertSame(resourceManager, newProject.resourceManager()); - assertEquals(newInfo, newProject.info()); + initializeProjects(); + Project actual = expected.replace(expected); + assertSame(resourceManager, actual.resourceManager()); + compareProjects(expected, actual); + } + + private void compareProjects(Project expected, Project value) { + assertEquals(expected, value); + assertEquals(expected.projectId(), value.projectId()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.labels(), 
value.labels()); + assertEquals(expected.projectNumber(), value.projectNumber()); + assertEquals(expected.createTimeMillis(), value.createTimeMillis()); + assertEquals(expected.state(), value.state()); + assertEquals(expected.parent(), value.parent()); + assertEquals(expected.resourceManager().options(), value.resourceManager().options()); } } diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java index fedd10eacdc6..a7c62dd1c69b 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java @@ -25,7 +25,7 @@ import com.google.common.collect.ImmutableMap; import com.google.gcloud.Page; -import com.google.gcloud.resourcemanager.ProjectInfo.ResourceId; +import com.google.gcloud.resourcemanager.Project.ResourceId; import com.google.gcloud.resourcemanager.ResourceManager.ProjectField; import com.google.gcloud.resourcemanager.ResourceManager.ProjectGetOption; import com.google.gcloud.resourcemanager.ResourceManager.ProjectListOption; @@ -55,9 +55,11 @@ public class ResourceManagerImplTest { ProjectListOption.fields(ProjectField.NAME, ProjectField.LABELS); private static final ProjectListOption LIST_FILTER = ProjectListOption.filter("id:* name:myProject labels.color:blue LABELS.SIZE:*"); - private static final ProjectInfo PARTIAL_PROJECT = ProjectInfo.builder("partial-project").build(); + private static final Project PARTIAL_PROJECT = + Project.builder(RESOURCE_MANAGER, "partial-project").build(); private static final ResourceId PARENT = new ResourceId("id", "type"); - private static final ProjectInfo COMPLETE_PROJECT = ProjectInfo.builder("complete-project") + private static final Project COMPLETE_PROJECT = + Project.builder(RESOURCE_MANAGER, "complete-project") .name("name") .labels(ImmutableMap.of("k1", "v1")) .parent(PARENT) @@ -78,7 +80,7 @@ public void setUp() { } private void clearProjects() { - for (ProjectInfo project : RESOURCE_MANAGER.list().values()) { + for (Project project : RESOURCE_MANAGER.list().values()) { RESOURCE_MANAGER_HELPER.removeProject(project.projectId()); } } @@ -88,7 +90,7 @@ public static void afterClass() { RESOURCE_MANAGER_HELPER.stop(); } - private void compareReadWriteFields(ProjectInfo expected, ProjectInfo actual) { + private void compareReadWriteFields(Project expected, Project actual) { assertEquals(expected.projectId(), actual.projectId()); assertEquals(expected.name(), actual.name()); assertEquals(expected.labels(), actual.labels()); @@ -97,9 +99,9 @@ private void compareReadWriteFields(ProjectInfo expected, ProjectInfo actual) { @Test public void testCreate() { - ProjectInfo returnedProject = RESOURCE_MANAGER.create(PARTIAL_PROJECT); + Project returnedProject = RESOURCE_MANAGER.create(PARTIAL_PROJECT); compareReadWriteFields(PARTIAL_PROJECT, returnedProject); - assertEquals(ProjectInfo.State.ACTIVE, returnedProject.state()); + assertEquals(Project.State.ACTIVE, returnedProject.state()); assertNull(returnedProject.name()); assertNull(returnedProject.parent()); assertNotNull(returnedProject.projectNumber()); @@ -114,7 +116,7 @@ public void testCreate() { } returnedProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); compareReadWriteFields(COMPLETE_PROJECT, returnedProject); - assertEquals(ProjectInfo.State.ACTIVE, 
returnedProject.state()); + assertEquals(Project.State.ACTIVE, returnedProject.state()); assertNotNull(returnedProject.projectNumber()); assertNotNull(returnedProject.createTimeMillis()); } @@ -123,7 +125,8 @@ public void testCreate() { public void testDelete() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); RESOURCE_MANAGER.delete(COMPLETE_PROJECT.projectId()); - assertEquals(ProjectInfo.State.DELETE_REQUESTED, + assertEquals( + Project.State.DELETE_REQUESTED, RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()).state()); try { RESOURCE_MANAGER.delete("some-nonexistant-project-id"); @@ -137,7 +140,7 @@ public void testDelete() { @Test public void testGet() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); - ProjectInfo returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); + Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); compareReadWriteFields(COMPLETE_PROJECT, returnedProject); RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.projectId()); assertNull(RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId())); @@ -145,8 +148,8 @@ public void testGet() { @Test public void testGetWithOptions() { - ProjectInfo originalProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); - ProjectInfo returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId(), GET_FIELDS); + Project originalProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId(), GET_FIELDS); assertFalse(COMPLETE_PROJECT.equals(returnedProject)); assertEquals(COMPLETE_PROJECT.projectId(), returnedProject.projectId()); assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); @@ -159,11 +162,11 @@ public void testGetWithOptions() { @Test public void testList() { - Page projects = RESOURCE_MANAGER.list(); + Page projects = RESOURCE_MANAGER.list(); assertFalse(projects.values().iterator().hasNext()); // TODO: change this when #421 is resolved RESOURCE_MANAGER.create(PARTIAL_PROJECT); RESOURCE_MANAGER.create(COMPLETE_PROJECT); - for (ProjectInfo p : RESOURCE_MANAGER.list().values()) { + for (Project p : RESOURCE_MANAGER.list().values()) { if (p.projectId().equals(PARTIAL_PROJECT.projectId())) { compareReadWriteFields(PARTIAL_PROJECT, p); } else if (p.projectId().equals(COMPLETE_PROJECT.projectId())) { @@ -177,8 +180,8 @@ public void testList() { @Test public void testListFieldOptions() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); - Page projects = RESOURCE_MANAGER.list(LIST_FIELDS); - ProjectInfo returnedProject = projects.iterateAll().next(); + Page projects = RESOURCE_MANAGER.list(LIST_FIELDS); + Project returnedProject = projects.iterateAll().next(); assertEquals(COMPLETE_PROJECT.projectId(), returnedProject.projectId()); assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); assertEquals(COMPLETE_PROJECT.labels(), returnedProject.labels()); @@ -190,24 +193,28 @@ public void testListFieldOptions() { @Test public void testListFilterOptions() { - ProjectInfo matchingProject = ProjectInfo.builder("matching-project") + Project matchingProject = + Project.builder(RESOURCE_MANAGER, "matching-project") .name("MyProject") .labels(ImmutableMap.of("color", "blue", "size", "big")) .build(); - ProjectInfo nonMatchingProject1 = ProjectInfo.builder("non-matching-project1") + Project nonMatchingProject1 = + Project.builder(RESOURCE_MANAGER, "non-matching-project1") .name("myProject") .labels(ImmutableMap.of("color", "blue")) .build(); - ProjectInfo nonMatchingProject2 = ProjectInfo.builder("non-matching-project2") + Project 
nonMatchingProject2 = + Project.builder(RESOURCE_MANAGER, "non-matching-project2") .name("myProj") .labels(ImmutableMap.of("color", "blue", "size", "big")) .build(); - ProjectInfo nonMatchingProject3 = ProjectInfo.builder("non-matching-project3").build(); + Project nonMatchingProject3 = + Project.builder(RESOURCE_MANAGER, "non-matching-project3").build(); RESOURCE_MANAGER.create(matchingProject); RESOURCE_MANAGER.create(nonMatchingProject1); RESOURCE_MANAGER.create(nonMatchingProject2); RESOURCE_MANAGER.create(nonMatchingProject3); - for (ProjectInfo p : RESOURCE_MANAGER.list(LIST_FILTER).values()) { + for (Project p : RESOURCE_MANAGER.list(LIST_FILTER).values()) { assertFalse(p.equals(nonMatchingProject1)); assertFalse(p.equals(nonMatchingProject2)); compareReadWriteFields(matchingProject, p); @@ -216,22 +223,23 @@ public void testListFilterOptions() { @Test public void testReplace() { - ProjectInfo createdProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Project createdProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); Map newLabels = ImmutableMap.of("new k1", "new v1"); - ProjectInfo anotherCompleteProject = ProjectInfo.builder(COMPLETE_PROJECT.projectId()) + Project anotherCompleteProject = + Project.builder(RESOURCE_MANAGER, COMPLETE_PROJECT.projectId()) .labels(newLabels) .projectNumber(987654321L) .createTimeMillis(230682061315L) - .state(ProjectInfo.State.DELETE_REQUESTED) + .state(Project.State.DELETE_REQUESTED) .parent(createdProject.parent()) .build(); - ProjectInfo returnedProject = RESOURCE_MANAGER.replace(anotherCompleteProject); + Project returnedProject = RESOURCE_MANAGER.replace(anotherCompleteProject); compareReadWriteFields(anotherCompleteProject, returnedProject); assertEquals(createdProject.projectNumber(), returnedProject.projectNumber()); assertEquals(createdProject.createTimeMillis(), returnedProject.createTimeMillis()); assertEquals(createdProject.state(), returnedProject.state()); - ProjectInfo nonexistantProject = - ProjectInfo.builder("some-project-id-that-does-not-exist").build(); + Project nonexistantProject = + Project.builder(RESOURCE_MANAGER, "some-project-id-that-does-not-exist").build(); try { RESOURCE_MANAGER.replace(nonexistantProject); fail("Should fail because the project doesn't exist."); @@ -246,12 +254,12 @@ public void testUndelete() { RESOURCE_MANAGER.create(COMPLETE_PROJECT); RESOURCE_MANAGER.delete(COMPLETE_PROJECT.projectId()); assertEquals( - ProjectInfo.State.DELETE_REQUESTED, + Project.State.DELETE_REQUESTED, RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()).state()); RESOURCE_MANAGER.undelete(COMPLETE_PROJECT.projectId()); - ProjectInfo revivedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); + Project revivedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); compareReadWriteFields(COMPLETE_PROJECT, revivedProject); - assertEquals(ProjectInfo.State.ACTIVE, revivedProject.state()); + assertEquals(Project.State.ACTIVE, revivedProject.state()); try { RESOURCE_MANAGER.undelete("invalid-project-id"); fail("Should fail because the project doesn't exist."); @@ -276,7 +284,7 @@ public void testRetryableException() { .andThrow(new ResourceManagerException(500, "Internal Error", true)) .andReturn(PARTIAL_PROJECT.toPb()); EasyMock.replay(resourceManagerRpcMock); - ProjectInfo returnedProject = resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + Project returnedProject = resourceManagerMock.get(PARTIAL_PROJECT.projectId()); assertEquals(PARTIAL_PROJECT, returnedProject); } diff --git 
a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java index 64e09449149b..eb6b3f080f58 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java @@ -35,16 +35,18 @@ public class SerializationTest { - private static final ProjectInfo PARTIAL_PROJECT_INFO = ProjectInfo.builder("id1").build(); - private static final ProjectInfo FULL_PROJECT_INFO = ProjectInfo.builder("id") +private static final ResourceManager RESOURCE_MANAGER = + ResourceManagerOptions.defaultInstance().service(); + private static final Project PARTIAL_PROJECT = Project.builder(RESOURCE_MANAGER, "id1").build(); + private static final Project FULL_PROJECT = Project.builder(RESOURCE_MANAGER, "id") .name("name") .labels(ImmutableMap.of("key", "value")) .projectNumber(123L) - .state(ProjectInfo.State.ACTIVE) + .state(Project.State.ACTIVE) .createTimeMillis(1234L) .build(); - private static final PageImpl PAGE_RESULT = - new PageImpl<>(null, "c", Collections.singletonList(PARTIAL_PROJECT_INFO)); + private static final PageImpl PAGE_RESULT = + new PageImpl<>(null, "c", Collections.singletonList(PARTIAL_PROJECT)); private static final ResourceManager.ProjectGetOption PROJECT_GET_OPTION = ResourceManager.ProjectGetOption.fields(ResourceManager.ProjectField.NAME); private static final ResourceManager.ProjectListOption PROJECT_LIST_OPTION = @@ -65,8 +67,8 @@ public void testServiceOptions() throws Exception { @Test public void testModelAndRequests() throws Exception { - Serializable[] objects = {PARTIAL_PROJECT_INFO, FULL_PROJECT_INFO, PAGE_RESULT, - PROJECT_GET_OPTION, PROJECT_LIST_OPTION}; + Serializable[] objects = { + PARTIAL_PROJECT, FULL_PROJECT, PAGE_RESULT, PROJECT_GET_OPTION, PROJECT_LIST_OPTION}; for (Serializable obj : objects) { Object copy = serializeAndDeserialize(obj); assertEquals(obj, obj); From ae1e2ba269f6c25b566889459311af9e9b08c967 Mon Sep 17 00:00:00 2001 From: Ajay Kannan Date: Tue, 19 Jan 2016 16:10:43 -0800 Subject: [PATCH 2/2] Merge storage objects (+ some minor fixes for examples, ITBigQueryTest, resource manager) --- .../gcloud/bigquery/ITBigQueryTest.java | 16 +- .../gcloud/examples/StorageExample.java | 75 +- .../gcloud/resourcemanager/Project.java | 18 +- .../gcloud/resourcemanager/ProjectTest.java | 51 +- .../ResourceManagerImplTest.java | 3 +- .../google/gcloud/storage/BatchRequest.java | 10 +- .../google/gcloud/storage/BatchResponse.java | 12 +- .../java/com/google/gcloud/storage/Blob.java | 876 ++++++++++++++-- .../com/google/gcloud/storage/BlobInfo.java | 699 ------------- .../gcloud/storage/BlobWriteChannel.java | 27 +- .../com/google/gcloud/storage/Bucket.java | 947 ++++++++++++++++-- .../com/google/gcloud/storage/BucketInfo.java | 814 --------------- .../com/google/gcloud/storage/CopyWriter.java | 24 +- .../com/google/gcloud/storage/Storage.java | 168 ++-- .../google/gcloud/storage/StorageImpl.java | 217 ++-- .../google/gcloud/storage/package-info.java | 4 +- .../storage/testing/RemoteGcsHelper.java | 6 +- .../gcloud/storage/testing/package-info.java | 2 +- .../gcloud/storage/BatchRequestTest.java | 47 +- .../gcloud/storage/BatchResponseTest.java | 36 +- .../google/gcloud/storage/BlobInfoTest.java | 160 --- .../com/google/gcloud/storage/BlobTest.java | 387 +++++-- 
.../gcloud/storage/BlobWriteChannelTest.java | 47 +- .../google/gcloud/storage/BucketInfoTest.java | 172 ---- .../com/google/gcloud/storage/BucketTest.java | 361 +++++-- .../gcloud/storage/CopyRequestTest.java | 65 +- .../google/gcloud/storage/CopyWriterTest.java | 46 +- .../google/gcloud/storage/ITStorageTest.java | 216 ++-- .../gcloud/storage/RemoteGcsHelperTest.java | 97 +- .../gcloud/storage/SerializationTest.java | 22 +- .../gcloud/storage/StorageImplTest.java | 713 +++++++------ 31 files changed, 3219 insertions(+), 3119 deletions(-) delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java delete mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java delete mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java index 528df30d0a61..524f8d9810de 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java @@ -34,8 +34,8 @@ import com.google.gcloud.Page; import com.google.gcloud.bigquery.BigQuery.DatasetOption; import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; -import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.Bucket; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.testing.RemoteGcsHelper; @@ -145,10 +145,16 @@ public static void beforeClass() throws IOException, InterruptedException { RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); bigquery = bigqueryHelper.options().service(); storage = gcsHelper.options().service(); - storage.create(BucketInfo.of(BUCKET)); - storage.create(BlobInfo.builder(BUCKET, LOAD_FILE).contentType("text/plain").build(), + Bucket.of(storage, BUCKET).create(); + Blob.builder(storage, BUCKET, LOAD_FILE) + .contentType("text/plain") + .build() + .create( CSV_CONTENT.getBytes(StandardCharsets.UTF_8)); - storage.create(BlobInfo.builder(BUCKET, JSON_LOAD_FILE).contentType("application/json").build(), + Blob.builder(storage, BUCKET, JSON_LOAD_FILE) + .contentType("application/json") + .build() + .create( JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build(); bigquery.create(info); diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java index e3bee626f49c..0141238d9230 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java @@ -23,9 +23,7 @@ import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.BlobId; -import com.google.gcloud.storage.BlobInfo; import com.google.gcloud.storage.Bucket; -import com.google.gcloud.storage.BucketInfo; import com.google.gcloud.storage.CopyWriter; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.Storage.ComposeRequest; @@ -93,7 +91,7 @@ private abstract static class StorageAction { 
abstract void run(Storage storage, T request) throws Exception; - abstract T parse(String... args) throws Exception; + abstract T parse(Storage storage, String... args) throws Exception; protected String params() { return ""; @@ -103,7 +101,7 @@ protected String params() { private abstract static class BlobsAction extends StorageAction { @Override - BlobId[] parse(String... args) { + BlobId[] parse(Storage storage, String... args) { if (args.length < 2) { throw new IllegalArgumentException(); } @@ -138,7 +136,7 @@ public void run(Storage storage, BlobId... blobIds) { System.out.println("No such bucket"); return; } - System.out.println("Bucket info: " + bucket.info()); + System.out.println("Bucket info: " + bucket); } else { // get Blob Blob blob = Blob.get(storage, blobIds[0]); @@ -146,25 +144,25 @@ public void run(Storage storage, BlobId... blobIds) { System.out.println("No such object"); return; } - System.out.println("Blob info: " + blob.info()); + System.out.println("Blob info: " + blob); } } else { // use batch to get multiple blobs. List blobs = Blob.get(storage, Arrays.asList(blobIds)); for (Blob blob : blobs) { if (blob != null) { - System.out.println(blob.info()); + System.out.println(blob); } } } } @Override - BlobId[] parse(String... args) { + BlobId[] parse(Storage storage, String... args) { if (args.length < 2) { return new BlobId[] {BlobId.of(args[0], "")}; } - return super.parse(args); + return super.parse(storage, args); } @Override @@ -204,7 +202,7 @@ public void run(Storage storage, BlobId... blobIds) { private static class ListAction extends StorageAction { @Override - String parse(String... args) { + String parse(Storage storage, String... args) { if (args.length == 0) { return null; } @@ -218,9 +216,9 @@ String parse(String... args) { public void run(Storage storage, String bucketName) { if (bucketName == null) { // list buckets - Iterator bucketInfoIterator = storage.list().iterateAll(); - while (bucketInfoIterator.hasNext()) { - System.out.println(bucketInfoIterator.next()); + Iterator bucketIterator = storage.list().iterateAll(); + while (bucketIterator.hasNext()) { + System.out.println(bucketIterator.next()); } } else { // list a bucket's blobs @@ -231,7 +229,7 @@ public void run(Storage storage, String bucketName) { } Iterator blobIterator = bucket.list().iterateAll(); while (blobIterator.hasNext()) { - System.out.println(blobIterator.next().info()); + System.out.println(blobIterator.next()); } } } @@ -247,17 +245,16 @@ public String params() { * * @see Objects: insert */ - private static class UploadAction extends StorageAction> { + private static class UploadAction extends StorageAction> { @Override - public void run(Storage storage, Tuple tuple) throws Exception { + public void run(Storage storage, Tuple tuple) throws Exception { run(storage, tuple.x(), tuple.y()); } - private void run(Storage storage, Path uploadFrom, BlobInfo blobInfo) throws IOException { + private void run(Storage storage, Path uploadFrom, Blob blob) throws IOException { if (Files.size(uploadFrom) > 1_000_000) { // When content is not available or large (1MB or more) it is recommended // to write it in chunks via the blob's channel writer. 
- Blob blob = new Blob(storage, blobInfo); try (WriteChannel writer = blob.writer()) { byte[] buffer = new byte[1024]; try (InputStream input = Files.newInputStream(uploadFrom)) { @@ -274,20 +271,20 @@ private void run(Storage storage, Path uploadFrom, BlobInfo blobInfo) throws IOE } else { byte[] bytes = Files.readAllBytes(uploadFrom); // create the blob in one request. - storage.create(blobInfo, bytes); + storage.create(blob, bytes); } System.out.println("Blob was created"); } @Override - Tuple parse(String... args) throws IOException { + Tuple parse(Storage storage, String... args) throws IOException { if (args.length < 2 || args.length > 3) { throw new IllegalArgumentException(); } Path path = Paths.get(args[0]); String contentType = Files.probeContentType(path); String blob = args.length < 3 ? path.getFileName().toString() : args[2]; - return Tuple.of(path, BlobInfo.builder(args[1], blob).contentType(contentType).build()); + return Tuple.of(path, Blob.builder(storage, args[1], blob).contentType(contentType).build()); } @Override @@ -320,7 +317,7 @@ private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOExcep if (downloadTo != null) { writeTo = new PrintStream(new FileOutputStream(downloadTo.toFile())); } - if (blob.info().size() < 1_000_000) { + if (blob.size() < 1_000_000) { // Blob is small read all its content in one request byte[] content = blob.content(); writeTo.write(content); @@ -344,7 +341,7 @@ private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOExcep } @Override - Tuple parse(String... args) { + Tuple parse(Storage storage, String... args) { if (args.length < 2 || args.length > 3) { throw new IllegalArgumentException(); } @@ -379,11 +376,11 @@ public void run(Storage storage, CopyRequest request) { } @Override - CopyRequest parse(String... args) { + CopyRequest parse(Storage storage, String... args) { if (args.length != 4) { throw new IllegalArgumentException(); } - return CopyRequest.of(args[0], args[1], BlobId.of(args[2], args[3])); + return CopyRequest.of(storage, args[0], args[1], BlobId.of(args[2], args[3])); } @Override @@ -400,17 +397,17 @@ public String params() { private static class ComposeAction extends StorageAction { @Override public void run(Storage storage, ComposeRequest request) { - BlobInfo composedBlobInfo = storage.compose(request); - System.out.println("Composed " + composedBlobInfo); + Blob composedBlob = storage.compose(request); + System.out.println("Composed " + composedBlob); } @Override - ComposeRequest parse(String... args) { + ComposeRequest parse(Storage storage, String... args) { if (args.length < 3) { throw new IllegalArgumentException(); } ComposeRequest.Builder request = ComposeRequest.builder(); - request.target(BlobInfo.builder(args[0], args[args.length - 1]).build()); + request.target(Blob.builder(storage, args[0], args[args.length - 1]).build()); for (int i = 1; i < args.length - 1; i++) { request.addSource(args[i]); } @@ -443,12 +440,12 @@ private void run(Storage storage, BlobId blobId, Map metadata) { System.out.println("No such object"); return; } - Blob updateBlob = blob.update(blob.info().toBuilder().metadata(metadata).build()); - System.out.println("Updated " + updateBlob.info()); + Blob updateBlob = blob.update(blob.toBuilder().metadata(metadata).build()); + System.out.println("Updated " + updateBlob); } @Override - Tuple> parse(String... args) { + Tuple> parse(Storage storage, String... 
args) { if (args.length < 2) { throw new IllegalArgumentException(); } @@ -478,25 +475,25 @@ public String params() { * @see Signed URLs */ private static class SignUrlAction extends - StorageAction> { +StorageAction> { private static final char[] PASSWORD = "notasecret".toCharArray(); @Override - public void run(Storage storage, Tuple tuple) + public void run(Storage storage, Tuple tuple) throws Exception { run(storage, tuple.x(), tuple.y()); } - private void run(Storage storage, ServiceAccountAuthCredentials cred, BlobInfo blobInfo) + private void run(Storage storage, ServiceAccountAuthCredentials cred, Blob blob) throws IOException { - Blob blob = new Blob(storage, blobInfo); System.out.println("Signed URL: " + blob.signUrl(1, TimeUnit.DAYS, SignUrlOption.serviceAccount(cred))); } @Override - Tuple parse(String... args) throws IOException, + Tuple parse(Storage storage, String... args) + throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException, UnrecoverableKeyException { if (args.length != 4) { @@ -506,7 +503,7 @@ Tuple parse(String... args) throws IOEx keystore.load(Files.newInputStream(Paths.get(args[0])), PASSWORD); PrivateKey privateKey = (PrivateKey) keystore.getKey("privatekey", PASSWORD); ServiceAccountAuthCredentials cred = AuthCredentials.createFor(args[1], privateKey); - return Tuple.of(cred, BlobInfo.builder(BlobId.of(args[2], args[3])).build()); + return Tuple.of(cred, Blob.builder(storage, BlobId.of(args[2], args[3])).build()); } @Override @@ -569,7 +566,7 @@ public static void main(String... args) throws Exception { Storage storage = optionsBuilder.build().service(); Object request; try { - request = action.parse(args); + request = action.parse(storage, args); } catch (IllegalArgumentException ex) { System.out.println("Invalid input for action '" + actionName + "'"); System.out.println("Expected: " + action.params()); diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java index 9c6b9465cbaf..a0ea9eab3d70 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java @@ -236,8 +236,8 @@ Builder parent(ResourceId parent) { /** * Sets the service object associated with this Project. This service object is used to send - * requests relevant to this blob (e.g. {@link #delete}, {@link #get}, {@link #reload}, - * {@link #replace}, {@link #undelete}). + * requests relevant to this blob (e.g. {@link #delete}, {@link #reload}, {@link #replace}, + * {@link #undelete}). */ public Builder resourceManager(ResourceManager resourceManager) { this.resourceManager = checkNotNull(resourceManager); @@ -261,20 +261,6 @@ public Project build() { this.resourceManager = builder.resourceManager; } - Project(String name, String projectId, Map labels, Long projectNumber, - State state, Long createTimeMillis, ResourceId parent, ResourceManagerOptions options, - ResourceManager resourceManager) { - this.name = name; - this.projectId = projectId; - this.labels = ImmutableMap.copyOf(labels); - this.projectNumber = projectNumber; - this.state = state; - this.createTimeMillis = createTimeMillis; - this.parent = parent; - this.options = options; - this.resourceManager = resourceManager; - } - /** * Get the unique, user-assigned ID of the project. 
* diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java index 7bccde5a5bf5..aa1f3859e633 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java @@ -22,7 +22,6 @@ import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; @@ -44,21 +43,14 @@ public class ProjectTest { private static final Project.State STATE = Project.State.DELETE_REQUESTED; private static final Project.ResourceId PARENT = new Project.ResourceId("id", "organization"); private ResourceManager resourceManager; + private ResourceManager serviceMockToBuildProject; private Project fullProject; - private Project fullProjectFromConstructor; + private Project expectedFullProject; private Project partialProject; private Project unnamedProjectFromList; private void initializeProjects() { - fullProject = - Project.builder(resourceManager, PROJECT_ID) - .name(NAME) - .labels(LABELS) - .projectNumber(PROJECT_NUMBER) - .createTimeMillis(CREATE_TIME_MILLIS) - .state(STATE) - .parent(PARENT) - .build(); + fullProject = expectedFullProject.toBuilder().resourceManager(resourceManager).build(); partialProject = Project.builder(resourceManager, PROJECT_ID).build(); unnamedProjectFromList = partialProject.toBuilder().name("Unnamed").build(); } @@ -66,8 +58,17 @@ private void initializeProjects() { @Before public void setup() { resourceManager = createMock(ResourceManager.class); - fullProjectFromConstructor = new Project(NAME, PROJECT_ID, LABELS, PROJECT_NUMBER, STATE, - CREATE_TIME_MILLIS, PARENT, null, resourceManager); + serviceMockToBuildProject = createMock(ResourceManager.class); + expect(serviceMockToBuildProject.options()).andReturn(null).anyTimes(); + replay(serviceMockToBuildProject); + expectedFullProject = Project.builder(serviceMockToBuildProject, PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build(); expect(resourceManager.options()).andReturn(null).anyTimes(); } @@ -137,7 +138,7 @@ public void testEquals() { @Test public void testCreate() { - expect(resourceManager.get(PROJECT_ID)).andReturn(fullProjectFromConstructor); + expect(resourceManager.get(PROJECT_ID)).andReturn(expectedFullProject); replay(resourceManager); initializeProjects(); Project loadedProject = Project.get(resourceManager, fullProject.projectId()); @@ -146,7 +147,7 @@ public void testCreate() { @Test public void testGet() { - expect(resourceManager.get(PROJECT_ID)).andReturn(fullProjectFromConstructor); + expect(resourceManager.get(PROJECT_ID)).andReturn(expectedFullProject); replay(resourceManager); initializeProjects(); Project loadedProject = Project.get(resourceManager, fullProject.projectId()); @@ -156,13 +157,12 @@ public void testGet() { @Test public void testReload() { Map newLabels = ImmutableMap.of("k1", "v1", "k2", "v2", "k3", "v3"); - Project project = new Project(NAME, PROJECT_ID, newLabels, PROJECT_NUMBER, STATE, - CREATE_TIME_MILLIS, PARENT, null, resourceManager); + Project project = 
expectedFullProject.toBuilder().labels(newLabels).build(); expect(resourceManager.get(PROJECT_ID)).andReturn(project); replay(resourceManager); initializeProjects(); - Project newProject = project.reload(); - assertSame(resourceManager, newProject.resourceManager()); + Project newProject = fullProject.reload(); + assertSame(serviceMockToBuildProject, newProject.resourceManager()); assertEquals(project, newProject); } @@ -176,14 +176,10 @@ public void testGetNull() { @Test public void testReloadDeletedProject() { - expect(resourceManager.get(PROJECT_ID)).andReturn(fullProjectFromConstructor); expect(resourceManager.get(PROJECT_ID)).andReturn(null); replay(resourceManager); initializeProjects(); - Project loadedProject = Project.get(resourceManager, fullProject.projectId()); - assertNotNull(loadedProject); - Project reloadedProject = loadedProject.reload(); - assertNull(reloadedProject); + assertNull(fullProject.reload()); } @Test @@ -212,13 +208,12 @@ public void testUndelete() { @Test public void testReplace() { Map newLabels = ImmutableMap.of("k1", "v1", "k2", "v2", "k3", "v3"); - Project expected = new Project(NAME, PROJECT_ID, newLabels, PROJECT_NUMBER, STATE, - CREATE_TIME_MILLIS, PARENT, null, resourceManager); + Project expected = expectedFullProject.toBuilder().labels(newLabels).build(); expect(resourceManager.replace(expected)).andReturn(expected); replay(resourceManager); initializeProjects(); - Project actual = expected.replace(expected); - assertSame(resourceManager, actual.resourceManager()); + Project actual = fullProject.replace(expected); + assertSame(serviceMockToBuildProject, actual.resourceManager()); compareProjects(expected, actual); } diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java index a7c62dd1c69b..3ce2d8b9c2bc 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java @@ -285,7 +285,8 @@ public void testRetryableException() { .andReturn(PARTIAL_PROJECT.toPb()); EasyMock.replay(resourceManagerRpcMock); Project returnedProject = resourceManagerMock.get(PARTIAL_PROJECT.projectId()); - assertEquals(PARTIAL_PROJECT, returnedProject); + assertEquals( + PARTIAL_PROJECT.toBuilder().resourceManager(resourceManagerMock).build(), returnedProject); } @Test diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java index bf77c731754e..642a6b5b1311 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java @@ -35,13 +35,13 @@ public final class BatchRequest implements Serializable { private static final long serialVersionUID = -1527992265939800345L; private final Map> toDelete; - private final Map> toUpdate; + private final Map> toUpdate; private final Map> toGet; public static class Builder { private Map> toDelete = new LinkedHashMap<>(); - private Map> toUpdate = new LinkedHashMap<>(); + private Map> toUpdate = new LinkedHashMap<>(); private Map> toGet = new LinkedHashMap<>(); private Builder() {} @@ -65,8 +65,8 @@ public Builder delete(BlobId blob, BlobSourceOption... options) { /** * Update the given blob. 
*/ - public Builder update(BlobInfo blobInfo, BlobTargetOption... options) { - toUpdate.put(blobInfo, Lists.newArrayList(options)); + public Builder update(Blob blob, BlobTargetOption... options) { + toUpdate.put(blob, Lists.newArrayList(options)); return this; } @@ -117,7 +117,7 @@ public Map> toDelete() { return toDelete; } - public Map> toUpdate() { + public Map> toUpdate() { return toUpdate; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java index 98e7ce09cef0..fe5f6f5743c8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java @@ -31,8 +31,8 @@ public final class BatchResponse implements Serializable { private static final long serialVersionUID = 1057416839397037706L; private final List> deleteResult; - private final List> updateResult; - private final List> getResult; + private final List> updateResult; + private final List> getResult; public static class Result implements Serializable { @@ -113,8 +113,8 @@ static Result empty() { } } - BatchResponse(List> deleteResult, List> updateResult, - List> getResult) { + BatchResponse(List> deleteResult, List> updateResult, + List> getResult) { this.deleteResult = ImmutableList.copyOf(deleteResult); this.updateResult = ImmutableList.copyOf(updateResult); this.getResult = ImmutableList.copyOf(getResult); @@ -146,14 +146,14 @@ public List> deletes() { /** * Returns the results for the update operations using the request order. */ - public List> updates() { + public List> updates() { return updateResult; } /** * Returns the results for the get operations using the request order. 
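Illustrative sketch (not part of the patch): how the merged {@code Blob} type flows through a batch round-trip now that BatchRequest and BatchResponse carry Blob instead of BlobInfo. The bucket and object names, the pre-existing storage and blob handles, and the use of Storage.apply(BatchRequest) as the submission call are assumptions made for illustration only.

    import com.google.gcloud.storage.BatchRequest;
    import com.google.gcloud.storage.BatchResponse;
    import com.google.gcloud.storage.Blob;
    import com.google.gcloud.storage.BlobId;
    import com.google.gcloud.storage.Storage;

    class BatchSketch {
      static void deleteUpdateGet(Storage storage, Blob blob) {
        // Build one batch that deletes an object, updates another, and fetches a third.
        BatchRequest batch = BatchRequest.builder()
            .delete(BlobId.of("my-bucket", "stale-object"))
            .update(blob.toBuilder().contentType("text/plain").build())
            .get(BlobId.of("my-bucket", "other-object"))
            .build();
        BatchResponse response = storage.apply(batch);
        // Results come back in request order; updates() and gets() now yield Blob rather than BlobInfo.
        Blob updated = response.updates().get(0).get();
        Blob fetched = response.gets().get(0).get();
        System.out.println("updated: " + updated + ", fetched: " + fetched);
      }
    }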
*/ - public List> gets() { + public List> gets() { return getResult; } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java index fe65f6ee010b..93b11ee3a080 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java @@ -16,27 +16,46 @@ package com.google.gcloud.storage; +import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.gcloud.storage.Blob.BlobSourceOption.toGetOptions; import static com.google.gcloud.storage.Blob.BlobSourceOption.toSourceOptions; +import com.google.api.client.util.Data; +import com.google.api.client.util.DateTime; +import com.google.api.services.storage.model.ObjectAccessControl; +import com.google.api.services.storage.model.StorageObject; +import com.google.api.services.storage.model.StorageObject.Owner; import com.google.common.base.Function; +import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import com.google.gcloud.ReadChannel; import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.storage.Storage.BlobTargetOption; import com.google.gcloud.storage.Storage.BlobWriteOption; import com.google.gcloud.storage.Storage.CopyRequest; import com.google.gcloud.storage.Storage.SignUrlOption; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.Serializable; +import java.math.BigInteger; import java.net.URL; +import java.util.AbstractMap; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.concurrent.TimeUnit; /** @@ -47,47 +66,69 @@ * information use {@link #reload}. *

*/ -public final class Blob { +public final class Blob implements Serializable { - private final Storage storage; - private final BlobInfo info; + private static final long serialVersionUID = -2498298106157261711L; + + private final BlobId blobId; + private final String id; + private final String selfLink; + private final String cacheControl; + private final List acl; + private final Acl.Entity owner; + private final Long size; + private final String etag; + private final String md5; + private final String crc32c; + private final String mediaLink; + private final Map metadata; + private final Long metageneration; + private final Long deleteTime; + private final Long updateTime; + private final String contentType; + private final String contentEncoding; + private final String contentDisposition; + private final String contentLanguage; + private final Integer componentCount; + private final StorageOptions options; + private transient Storage storage; /** * Class for specifying blob source options when {@code Blob} methods are used. */ public static class BlobSourceOption extends Option { - private static final long serialVersionUID = 214616862061934846L; + private static final long serialVersionUID = 4157314244790869346L; private BlobSourceOption(StorageRpc.Option rpcOption) { super(rpcOption, null); } - private Storage.BlobSourceOption toSourceOptions(BlobInfo blobInfo) { + private Storage.BlobSourceOption toSourceOptions(Blob blob) { switch (rpcOption()) { case IF_GENERATION_MATCH: - return Storage.BlobSourceOption.generationMatch(blobInfo.generation()); + return Storage.BlobSourceOption.generationMatch(blob.generation()); case IF_GENERATION_NOT_MATCH: - return Storage.BlobSourceOption.generationNotMatch(blobInfo.generation()); + return Storage.BlobSourceOption.generationNotMatch(blob.generation()); case IF_METAGENERATION_MATCH: - return Storage.BlobSourceOption.metagenerationMatch(blobInfo.metageneration()); + return Storage.BlobSourceOption.metagenerationMatch(blob.metageneration()); case IF_METAGENERATION_NOT_MATCH: - return Storage.BlobSourceOption.metagenerationNotMatch(blobInfo.metageneration()); + return Storage.BlobSourceOption.metagenerationNotMatch(blob.metageneration()); default: throw new AssertionError("Unexpected enum value"); } } - private Storage.BlobGetOption toGetOption(BlobInfo blobInfo) { + private Storage.BlobGetOption toGetOption(Blob blob) { switch (rpcOption()) { case IF_GENERATION_MATCH: - return Storage.BlobGetOption.generationMatch(blobInfo.generation()); + return Storage.BlobGetOption.generationMatch(blob.generation()); case IF_GENERATION_NOT_MATCH: - return Storage.BlobGetOption.generationNotMatch(blobInfo.generation()); + return Storage.BlobGetOption.generationNotMatch(blob.generation()); case IF_METAGENERATION_MATCH: - return Storage.BlobGetOption.metagenerationMatch(blobInfo.metageneration()); + return Storage.BlobGetOption.metagenerationMatch(blob.metageneration()); case IF_METAGENERATION_NOT_MATCH: - return Storage.BlobGetOption.metagenerationNotMatch(blobInfo.metageneration()); + return Storage.BlobGetOption.metagenerationNotMatch(blob.metageneration()); default: throw new AssertionError("Unexpected enum value"); } @@ -125,36 +166,721 @@ public static BlobSourceOption metagenerationNotMatch() { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } - static Storage.BlobSourceOption[] toSourceOptions(BlobInfo blobInfo, + static Storage.BlobSourceOption[] toSourceOptions(Blob blob, BlobSourceOption... 
options) { Storage.BlobSourceOption[] convertedOptions = new Storage.BlobSourceOption[options.length]; int index = 0; for (BlobSourceOption option : options) { - convertedOptions[index++] = option.toSourceOptions(blobInfo); + convertedOptions[index++] = option.toSourceOptions(blob); } return convertedOptions; } - static Storage.BlobGetOption[] toGetOptions(BlobInfo blobInfo, BlobSourceOption... options) { + static Storage.BlobGetOption[] toGetOptions(Blob blob, BlobSourceOption... options) { Storage.BlobGetOption[] convertedOptions = new Storage.BlobGetOption[options.length]; int index = 0; for (BlobSourceOption option : options) { - convertedOptions[index++] = option.toGetOption(blobInfo); + convertedOptions[index++] = option.toGetOption(blob); } return convertedOptions; } } + static final Function, Blob> FROM_PB_FUNCTION = + new Function, Blob>() { + @Override + public Blob apply(Tuple pb) { + return Blob.fromPb(pb.x(), pb.y()); + } + }; + static final Function TO_PB_FUNCTION = new Function() { + @Override + public StorageObject apply(Blob blob) { + return blob.toPb(); + } + }; + + /** + * This class is meant for internal use only. Users are discouraged from using this class. + */ + public static final class ImmutableEmptyMap extends AbstractMap { + + @Override + public Set> entrySet() { + return ImmutableSet.of(); + } + } + + public static final class Builder { + + private BlobId blobId; + private String id; + private String contentType; + private String contentEncoding; + private String contentDisposition; + private String contentLanguage; + private Integer componentCount; + private String cacheControl; + private List acl; + private Acl.Entity owner; + private Long size; + private String etag; + private String selfLink; + private String md5; + private String crc32c; + private String mediaLink; + private Map metadata; + private Long metageneration; + private Long deleteTime; + private Long updateTime; + private Storage storage; + + private Builder() {} + + private Builder(Blob blob) { + blobId = blob.blobId; + id = blob.id; + cacheControl = blob.cacheControl; + contentEncoding = blob.contentEncoding; + contentType = blob.contentType; + contentDisposition = blob.contentDisposition; + contentLanguage = blob.contentLanguage; + componentCount = blob.componentCount; + acl = blob.acl; + owner = blob.owner; + size = blob.size; + etag = blob.etag; + selfLink = blob.selfLink; + md5 = blob.md5; + crc32c = blob.crc32c; + mediaLink = blob.mediaLink; + metadata = blob.metadata; + metageneration = blob.metageneration; + deleteTime = blob.deleteTime; + updateTime = blob.updateTime; + storage = blob.storage; + } + + /** + * Sets the blob identity. + */ + public Builder blobId(BlobId blobId) { + this.blobId = checkNotNull(blobId); + return this; + } + + Builder id(String id) { + this.id = id; + return this; + } + + /** + * Sets the blob's data content type. + * + * @see Content-Type + */ + public Builder contentType(String contentType) { + this.contentType = firstNonNull(contentType, Data.nullOf(String.class)); + return this; + } + + /** + * Sets the blob's data content disposition. + * + * @see Content-Disposition + */ + public Builder contentDisposition(String contentDisposition) { + this.contentDisposition = firstNonNull(contentDisposition, Data.nullOf(String.class)); + return this; + } + + /** + * Sets the blob's data content language. 
+ * + * @see Content-Language + */ + public Builder contentLanguage(String contentLanguage) { + this.contentLanguage = firstNonNull(contentLanguage, Data.nullOf(String.class)); + return this; + } + + /** + * Sets the blob's data content encoding. + * + * @see Content-Encoding + */ + public Builder contentEncoding(String contentEncoding) { + this.contentEncoding = firstNonNull(contentEncoding, Data.nullOf(String.class)); + return this; + } + + Builder componentCount(Integer componentCount) { + this.componentCount = componentCount; + return this; + } + + /** + * Sets the blob's data cache control. + * + * @see Cache-Control + */ + public Builder cacheControl(String cacheControl) { + this.cacheControl = firstNonNull(cacheControl, Data.nullOf(String.class)); + return this; + } + + /** + * Sets the blob's access control configuration. + * + * @see + * About Access Control Lists + */ + public Builder acl(List acl) { + this.acl = acl != null ? ImmutableList.copyOf(acl) : null; + return this; + } + + Builder owner(Acl.Entity owner) { + this.owner = owner; + return this; + } + + Builder size(Long size) { + this.size = size; + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + /** + * Sets the MD5 hash of blob's data. MD5 value must be encoded in base64. + * + * @see + * Hashes and ETags: Best Practices + */ + public Builder md5(String md5) { + this.md5 = firstNonNull(md5, Data.nullOf(String.class)); + return this; + } + + /** + * Sets the CRC32C checksum of blob's data as described in + * RFC 4960, Appendix B; encoded in + * base64 in big-endian order. + * + * @see + * Hashes and ETags: Best Practices + */ + public Builder crc32c(String crc32c) { + this.crc32c = firstNonNull(crc32c, Data.nullOf(String.class)); + return this; + } + + Builder mediaLink(String mediaLink) { + this.mediaLink = mediaLink; + return this; + } + + /** + * Sets the blob's user provided metadata. + */ + public Builder metadata(Map metadata) { + this.metadata = metadata != null + ? new HashMap<>(metadata) : Data.>nullOf(ImmutableEmptyMap.class); + return this; + } + + Builder metageneration(Long metageneration) { + this.metageneration = metageneration; + return this; + } + + Builder deleteTime(Long deleteTime) { + this.deleteTime = deleteTime; + return this; + } + + Builder updateTime(Long updateTime) { + this.updateTime = updateTime; + return this; + } + + /** + * Sets the service object associated with this blob. This service object is used to send + * requests relevant to this blob. + */ + public Builder storage(Storage storage) { + this.storage = storage; + return this; + } + + /** + * Creates a {@code Blob} object. 
+ */ + public Blob build() { + checkNotNull(blobId); + checkNotNull(storage); + return new Blob(this); + } + } + + private Blob(Builder builder) { + blobId = builder.blobId; + id = builder.id; + cacheControl = builder.cacheControl; + contentEncoding = builder.contentEncoding; + contentType = builder.contentType; + contentDisposition = builder.contentDisposition; + contentLanguage = builder.contentLanguage; + componentCount = builder.componentCount; + acl = builder.acl; + owner = builder.owner; + size = builder.size; + etag = builder.etag; + selfLink = builder.selfLink; + md5 = builder.md5; + crc32c = builder.crc32c; + mediaLink = builder.mediaLink; + metadata = builder.metadata; + metageneration = builder.metageneration; + deleteTime = builder.deleteTime; + updateTime = builder.updateTime; + options = builder.storage.options(); + storage = builder.storage; + } + /** - * Constructs a {@code Blob} object for the provided {@code BlobInfo}. The storage service is used - * to issue requests. + * Returns the blob's identity. + */ + public BlobId blobId() { + return blobId; + } + + /** + * Returns the name of the containing bucket. + */ + public String bucket() { + return blobId().bucket(); + } + + /** + * Returns the blob's id. + */ + public String id() { + return id; + } + + /** + * Returns the blob's name. + */ + public String name() { + return blobId().name(); + } + + /** + * Returns the blob's data cache control. * - * @param storage the storage service used for issuing requests - * @param info blob's info + * @see Cache-Control */ - public Blob(Storage storage, BlobInfo info) { - this.storage = checkNotNull(storage); - this.info = checkNotNull(info); + public String cacheControl() { + return Data.isNull(cacheControl) ? null : cacheControl; + } + + /** + * Returns the blob's access control configuration. + * + * @see + * About Access Control Lists + */ + public List acl() { + return acl; + } + + /** + * Returns the blob's owner. This will always be the uploader of the blob. + */ + public Acl.Entity owner() { + return owner; + } + + /** + * Returns the content length of the data in bytes. + * + * @see Content-Length + */ + public Long size() { + return size; + } + + /** + * Returns the blob's data content type. + * + * @see Content-Type + */ + public String contentType() { + return Data.isNull(contentType) ? null : contentType; + } + + /** + * Returns the blob's data content encoding. + * + * @see Content-Encoding + */ + public String contentEncoding() { + return Data.isNull(contentEncoding) ? null : contentEncoding; + } + + /** + * Returns the blob's data content disposition. + * + * @see Content-Disposition + */ + public String contentDisposition() { + return Data.isNull(contentDisposition) ? null : contentDisposition; + } + + /** + * Returns the blob's data content language. + * + * @see Content-Language + */ + public String contentLanguage() { + return Data.isNull(contentLanguage) ? null : contentLanguage; + } + + /** + * Returns the number of components that make up this blob. Components are accumulated through + * the {@link Storage#compose(Storage.ComposeRequest)} operation and are limited to a count of + * 1024, counting 1 for each non-composite component blob and componentCount for each composite + * component blob. This value is set only for composite blobs. + * + * @see Component Count + * Property + */ + public Integer componentCount() { + return componentCount; + } + + /** + * Returns HTTP 1.1 Entity tag for the blob. 
+ * + * @see Entity Tags + */ + public String etag() { + return etag; + } + + /** + * Returns the URI of this blob as a string. + */ + public String selfLink() { + return selfLink; + } + + /** + * Returns the MD5 hash of blob's data encoded in base64. + * + * @see + * Hashes and ETags: Best Practices + */ + public String md5() { + return Data.isNull(md5) ? null : md5; + } + + /** + * Returns the CRC32C checksum of blob's data as described in + * RFC 4960, Appendix B; encoded in + * base64 in big-endian order. + * + * @see + * Hashes and ETags: Best Practices + */ + public String crc32c() { + return Data.isNull(crc32c) ? null : crc32c; + } + + /** + * Returns the blob's media download link. + */ + public String mediaLink() { + return mediaLink; + } + + /** + * Returns blob's user provided metadata. + */ + public Map metadata() { + return metadata == null || Data.isNull(metadata) ? null : Collections.unmodifiableMap(metadata); + } + + /** + * Returns blob's data generation. Used for blob versioning. + */ + public Long generation() { + return blobId().generation(); + } + + /** + * Returns blob's metageneration. Used for preconditions and for detecting changes in metadata. + * A metageneration number is only meaningful in the context of a particular generation of a + * particular blob. + */ + public Long metageneration() { + return metageneration; + } + + /** + * Returns the deletion time of the blob. + */ + public Long deleteTime() { + return deleteTime; + } + + /** + * Returns the last modification time of the blob's metadata. + */ + public Long updateTime() { + return updateTime; + } + + /** + * Returns the service options associated with this blob. + */ + public StorageOptions options() { + return options; + } + + /** + * Returns the service object associated with this blob. + */ + public Storage storage() { + return storage; + } + + /** + * Returns a builder for the current blob. 
+ */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("bucket", bucket()) + .add("name", name()) + .add("generation", generation()) + .add("size", size()) + .add("content-type", contentType()) + .add("metadata", metadata()) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(blobId); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Blob && Objects.equals(toPb(), ((Blob) obj).toPb()) + && Objects.equals(options, ((Blob) obj).options); + } + + StorageObject toPb() { + StorageObject storageObject = blobId.toPb(); + if (acl != null) { + storageObject.setAcl(Lists.transform(acl, new Function() { + @Override + public ObjectAccessControl apply(Acl acl) { + return acl.toObjectPb(); + } + })); + } + if (deleteTime != null) { + storageObject.setTimeDeleted(new DateTime(deleteTime)); + } + if (updateTime != null) { + storageObject.setUpdated(new DateTime(updateTime)); + } + if (size != null) { + storageObject.setSize(BigInteger.valueOf(size)); + } + if (owner != null) { + storageObject.setOwner(new Owner().setEntity(owner.toPb())); + } + Map pbMetadata = metadata; + if (metadata != null && !Data.isNull(metadata)) { + pbMetadata = Maps.newHashMapWithExpectedSize(metadata.size()); + for (Map.Entry entry : metadata.entrySet()) { + pbMetadata.put(entry.getKey(), + firstNonNull(entry.getValue(), Data.nullOf(String.class))); + } + } + storageObject.setMetadata(pbMetadata); + storageObject.setCacheControl(cacheControl); + storageObject.setContentEncoding(contentEncoding); + storageObject.setCrc32c(crc32c); + storageObject.setContentType(contentType); + storageObject.setMd5Hash(md5); + storageObject.setMediaLink(mediaLink); + storageObject.setMetageneration(metageneration); + storageObject.setContentDisposition(contentDisposition); + storageObject.setComponentCount(componentCount); + storageObject.setContentLanguage(contentLanguage); + storageObject.setEtag(etag); + storageObject.setId(id); + storageObject.setSelfLink(selfLink); + return storageObject; + } + + /** + * Returns a {@code Blob} builder where blob identity is set using the provided values. + */ + public static Builder builder(Storage storage, Bucket bucket, String name) { + return builder(storage, bucket.name(), name); + } + + /** + * Returns a {@code Blob} builder where blob identity is set using the provided values. + */ + public static Builder builder(Storage storage, String bucket, String name) { + return new Builder().blobId(BlobId.of(bucket, name)).storage(storage); + } + + /** + * Returns a {@code Blob} builder where blob identity is set using the provided values. + */ + public static Builder builder( +Storage storage, Bucket bucket, String name, Long generation) { + return builder(storage, bucket.name(), name, generation); + } + + /** + * Returns a {@code Blob} builder where blob identity is set using the provided values. 
+ */ + public static Builder builder(Storage storage, String bucket, String name, Long generation) { + return new Builder().blobId(BlobId.of(bucket, name, generation)).storage(storage); + } + + public static Builder builder(Storage storage, BlobId blobId) { + return new Builder().blobId(blobId).storage(storage); + } + + static Blob fromPb(Storage storage, StorageObject storageObject) { + Builder builder = builder(storage, BlobId.fromPb(storageObject)); + if (storageObject.getCacheControl() != null) { + builder.cacheControl(storageObject.getCacheControl()); + } + if (storageObject.getContentEncoding() != null) { + builder.contentEncoding(storageObject.getContentEncoding()); + } + if (storageObject.getCrc32c() != null) { + builder.crc32c(storageObject.getCrc32c()); + } + if (storageObject.getContentType() != null) { + builder.contentType(storageObject.getContentType()); + } + if (storageObject.getMd5Hash() != null) { + builder.md5(storageObject.getMd5Hash()); + } + if (storageObject.getMediaLink() != null) { + builder.mediaLink(storageObject.getMediaLink()); + } + if (storageObject.getMetageneration() != null) { + builder.metageneration(storageObject.getMetageneration()); + } + if (storageObject.getContentDisposition() != null) { + builder.contentDisposition(storageObject.getContentDisposition()); + } + if (storageObject.getComponentCount() != null) { + builder.componentCount(storageObject.getComponentCount()); + } + if (storageObject.getContentLanguage() != null) { + builder.contentLanguage(storageObject.getContentLanguage()); + } + if (storageObject.getEtag() != null) { + builder.etag(storageObject.getEtag()); + } + if (storageObject.getId() != null) { + builder.id(storageObject.getId()); + } + if (storageObject.getSelfLink() != null) { + builder.selfLink(storageObject.getSelfLink()); + } + if (storageObject.getMetadata() != null) { + builder.metadata(storageObject.getMetadata()); + } + if (storageObject.getTimeDeleted() != null) { + builder.deleteTime(storageObject.getTimeDeleted().getValue()); + } + if (storageObject.getUpdated() != null) { + builder.updateTime(storageObject.getUpdated().getValue()); + } + if (storageObject.getSize() != null) { + builder.size(storageObject.getSize().longValue()); + } + if (storageObject.getOwner() != null) { + builder.owner(Acl.Entity.fromPb(storageObject.getOwner().getEntity())); + } + if (storageObject.getAcl() != null) { + builder.acl(Lists.transform(storageObject.getAcl(), new Function() { + @Override + public Acl apply(ObjectAccessControl objectAccessControl) { + return Acl.fromPb(objectAccessControl); + } + })); + } + return builder.build(); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.storage = options.service(); + } + + /** + * Create a new blob with no content. + * + * @return a complete blob + * @throws StorageException upon failure + */ + public Blob create(BlobTargetOption... options) { + return storage.create(this, options); + } + + /** + * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * {@link #writer} is recommended as it uses resumable upload. MD5 and CRC32C hashes of + * {@code content} are computed and used for validating transferred data. + * + * @return a complete blob + * @throws StorageException upon failure + * @see Hashes and ETags + */ + public Blob create(byte[] content, BlobTargetOption... options) { + return storage.create(this, content, options); + } + + /** + * Create a new blob. 
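Illustrative sketch (not part of the patch): with the service handle carried on the builder, creating an object becomes a single chain through build() and create(byte[]). The bucket and object names below are placeholders.

    import com.google.gcloud.storage.Blob;
    import com.google.gcloud.storage.Storage;
    import java.nio.charset.StandardCharsets;

    class CreateSketch {
      static Blob createNote(Storage storage) {
        // Direct upload suits small payloads; for large content blob.writer() with
        // resumable upload is the recommended path.
        return Blob.builder(storage, "my-bucket", "notes.txt")
            .contentType("text/plain")
            .build()
            .create("hello world".getBytes(StandardCharsets.UTF_8));
      }
    }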
Direct upload is used to upload {@code content}. For large content, + * {@link #writer} is recommended as it uses resumable upload. By default any md5 and crc32c + * values in the given {@code blob} are ignored unless requested via the + * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. + * + * @return a complete blob + * @throws StorageException upon failure + */ + public Blob create(InputStream content, BlobWriteOption... options) { + return storage.create(this, content, options); } /** @@ -184,22 +910,7 @@ public static Blob get(Storage storage, String bucket, String blob, * @throws StorageException upon failure */ public static Blob get(Storage storage, BlobId blobId, Storage.BlobGetOption... options) { - BlobInfo info = storage.get(blobId, options); - return info != null ? new Blob(storage, info) : null; - } - - /** - * Returns the blob's information. - */ - public BlobInfo info() { - return info; - } - - /** - * Returns the blob's id. - */ - public BlobId id() { - return info.blobId(); + return storage.get(blobId, options); } /** @@ -211,9 +922,9 @@ public BlobId id() { */ public boolean exists(BlobSourceOption... options) { int length = options.length; - Storage.BlobGetOption[] getOptions = Arrays.copyOf(toGetOptions(info, options), length + 1); + Storage.BlobGetOption[] getOptions = Arrays.copyOf(toGetOptions(this, options), length + 1); getOptions[length] = Storage.BlobGetOption.fields(); - return storage.get(info.blobId(), getOptions) != null; + return storage.get(blobId, getOptions) != null; } /** @@ -223,7 +934,7 @@ public boolean exists(BlobSourceOption... options) { * @throws StorageException upon failure */ public byte[] content(Storage.BlobSourceOption... options) { - return storage.readAllBytes(info.blobId(), options); + return storage.readAllBytes(blobId, options); } /** @@ -234,7 +945,7 @@ public byte[] content(Storage.BlobSourceOption... options) { * @throws StorageException upon failure */ public Blob reload(BlobSourceOption... options) { - return Blob.get(storage, info.blobId(), toGetOptions(info, options)); + return Blob.get(storage, blobId, toGetOptions(this, options)); } /** @@ -243,27 +954,27 @@ public Blob reload(BlobSourceOption... options) { * {@link #delete} operations. A new {@code Blob} object is returned. By default no checks are * made on the metadata generation of the current blob. If you want to update the information only * if the current blob metadata are at their latest version use the {@code metagenerationMatch} - * option: {@code blob.update(newInfo, BlobTargetOption.metagenerationMatch())}. + * option: {@code blob.update(newBlob, BlobTargetOption.metagenerationMatch())}. * - *

Original metadata are merged with metadata in the provided {@code blobInfo}. To replace
+   * Original metadata are merged with metadata in the provided {@code blob}. To replace
    * metadata instead you first have to unset them. Unsetting metadata can be done by setting the
-   * provided {@code blobInfo}'s metadata to {@code null}.
+   * provided {@code blob}'s metadata to {@code null}.
    *
    * Example usage of replacing blob's metadata:
-   *    {@code blob.update(blob.info().toBuilder().metadata(null).build());}
-   *    {@code blob.update(blob.info().toBuilder().metadata(newMetadata).build());}
+   *    {@code blob.update(blob.toBuilder().metadata(null).build());}
+   *    {@code blob.update(blob.toBuilder().metadata(newMetadata).build());}
    *
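For illustration, a short sketch of the merge-versus-replace behaviour described above, under the same assumption that update() and toBuilder() behave as introduced in this patch; the metadata map is a hypothetical parameter:

import com.google.gcloud.storage.Blob;

import java.util.Map;

public class BlobMetadataSketch {
  static Blob replaceMetadata(Blob blob, Map<String, String> newMetadata) {
    // update() merges metadata by default, so first unset the existing entries...
    Blob cleared = blob.update(blob.toBuilder().metadata(null).build());
    // ...then install the replacement map from a clean slate.
    return cleared.update(cleared.toBuilder().metadata(newMetadata).build());
  }
}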
* - * @param blobInfo new blob's information. Bucket and blob names must match the current ones + * @param blob the new blob. Bucket and blob names must match the current ones * @param options update options * @return a {@code Blob} object with updated information * @throws StorageException upon failure */ - public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { - checkArgument(Objects.equals(blobInfo.bucket(), info.bucket()), "Bucket name must match"); - checkArgument(Objects.equals(blobInfo.name(), info.name()), "Blob name must match"); - return new Blob(storage, storage.update(blobInfo, options)); + public Blob update(Blob blob, BlobTargetOption... options) { + checkArgument(Objects.equals(blob.bucket(), bucket()), "Bucket name must match"); + checkArgument(Objects.equals(blob.name(), name()), "Blob name must match"); + return storage.update(blob, options); } /** @@ -274,7 +985,7 @@ public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { * @throws StorageException upon failure */ public boolean delete(BlobSourceOption... options) { - return storage.delete(info.blobId(), toSourceOptions(info, options)); + return storage.delete(blobId, toSourceOptions(this, options)); } /** @@ -288,8 +999,12 @@ public boolean delete(BlobSourceOption... options) { * @throws StorageException upon failure */ public CopyWriter copyTo(BlobId targetBlob, BlobSourceOption... options) { - CopyRequest copyRequest = CopyRequest.builder().source(info.bucket(), info.name()) - .sourceOptions(toSourceOptions(info, options)).target(targetBlob).build(); + CopyRequest copyRequest = + CopyRequest.builder() + .source(this.bucket(), this.name()) + .sourceOptions(toSourceOptions(this, options)) + .target(storage, targetBlob) + .build(); return storage.copy(copyRequest); } @@ -304,7 +1019,7 @@ public CopyWriter copyTo(BlobId targetBlob, BlobSourceOption... options) { * @throws StorageException upon failure */ public CopyWriter copyTo(String targetBucket, BlobSourceOption... options) { - return copyTo(targetBucket, info.name(), options); + return copyTo(targetBucket, name(), options); } /** @@ -329,7 +1044,7 @@ public CopyWriter copyTo(String targetBucket, String targetBlob, BlobSourceOptio * @throws StorageException upon failure */ public ReadChannel reader(BlobSourceOption... options) { - return storage.reader(info.blobId(), toSourceOptions(info, options)); + return storage.reader(blobId, toSourceOptions(this, options)); } /** @@ -341,7 +1056,7 @@ public ReadChannel reader(BlobSourceOption... options) { * @throws StorageException upon failure */ public WriteChannel writer(BlobWriteOption... options) { - return storage.writer(info, options); + return storage.writer(this, options); } /** @@ -358,14 +1073,7 @@ public WriteChannel writer(BlobWriteOption... options) { * @see Signed-URLs */ public URL signUrl(long duration, TimeUnit unit, SignUrlOption... options) { - return storage.signUrl(info, duration, unit, options); - } - - /** - * Returns the blob's {@code Storage} object used to issue requests. - */ - public Storage storage() { - return storage; + return storage.signUrl(this, duration, unit, options); } /** @@ -405,42 +1113,30 @@ public static List get(final Storage storage, List blobs) { checkNotNull(storage); checkNotNull(blobs); BlobId[] blobArray = blobs.toArray(new BlobId[blobs.size()]); - return Collections.unmodifiableList(Lists.transform(storage.get(blobArray), - new Function() { - @Override - public Blob apply(BlobInfo blobInfo) { - return blobInfo != null ? 
new Blob(storage, blobInfo) : null; - } - })); + return Collections.unmodifiableList(storage.get(blobArray)); } /** * Updates the requested blobs. A batch request is used to update blobs. Original metadata are - * merged with metadata in the provided {@code BlobInfo} objects. To replace metadata instead + * merged with metadata in the provided {@code Blob} objects. To replace metadata instead * you first have to unset them. Unsetting metadata can be done by setting the provided - * {@code BlobInfo} objects metadata to {@code null}. See - * {@link #update(com.google.gcloud.storage.BlobInfo, + * {@code Blob} objects metadata to {@code null}. See + * {@link #update(com.google.gcloud.storage.Blob, * com.google.gcloud.storage.Storage.BlobTargetOption...) } for a code example. * * @param storage the storage service used to issue the request - * @param infos the blobs to update + * @param blobs the blobs to update * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it has * been denied the corresponding item in the list is {@code null} * @throws StorageException upon failure */ - public static List update(final Storage storage, BlobInfo... infos) { + public static List update(final Storage storage, Blob... blobs) { checkNotNull(storage); - checkNotNull(infos); - if (infos.length == 0) { + checkNotNull(blobs); + if (blobs.length == 0) { return Collections.emptyList(); } - return Collections.unmodifiableList(Lists.transform(storage.update(infos), - new Function() { - @Override - public Blob apply(BlobInfo blobInfo) { - return blobInfo != null ? new Blob(storage, blobInfo) : null; - } - })); + return Collections.unmodifiableList(storage.update(blobs)); } /** diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java deleted file mode 100644 index b27d00d68a16..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java +++ /dev/null @@ -1,699 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
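For illustration, a sketch of the batched static helpers above, assuming the merged Blob API; the bucket and object names are hypothetical:

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;

import java.util.Arrays;
import java.util.List;

public class BlobBatchSketch {
  static void printExisting(Storage storage) {
    List<BlobId> ids = Arrays.asList(
        BlobId.of("my-bucket", "a.txt"),
        BlobId.of("my-bucket", "b.txt"));
    // A single batch request; missing or inaccessible blobs come back as null entries.
    for (Blob blob : Blob.get(storage, ids)) {
      if (blob != null) {
        System.out.println(blob.name());
      }
    }
  }
}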
- */ - -package com.google.gcloud.storage; - -import static com.google.common.base.MoreObjects.firstNonNull; -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.api.client.util.Data; -import com.google.api.client.util.DateTime; -import com.google.api.services.storage.model.ObjectAccessControl; -import com.google.api.services.storage.model.StorageObject; -import com.google.api.services.storage.model.StorageObject.Owner; -import com.google.common.base.Function; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -import java.io.Serializable; -import java.math.BigInteger; -import java.util.AbstractMap; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -/** - * Google Storage object metadata. - * - * @see Concepts and - * Terminology - */ -public final class BlobInfo implements Serializable { - - static final Function FROM_PB_FUNCTION = - new Function() { - @Override - public BlobInfo apply(StorageObject pb) { - return BlobInfo.fromPb(pb); - } - }; - static final Function TO_PB_FUNCTION = - new Function() { - @Override - public StorageObject apply(BlobInfo blobInfo) { - return blobInfo.toPb(); - } - }; - private static final long serialVersionUID = 2228487739943277159L; - private final BlobId blobId; - private final String id; - private final String selfLink; - private final String cacheControl; - private final List acl; - private final Acl.Entity owner; - private final Long size; - private final String etag; - private final String md5; - private final String crc32c; - private final String mediaLink; - private final Map metadata; - private final Long metageneration; - private final Long deleteTime; - private final Long updateTime; - private final String contentType; - private final String contentEncoding; - private final String contentDisposition; - private final String contentLanguage; - private final Integer componentCount; - - /** - * This class is meant for internal use only. Users are discouraged from using this class. 
- */ - public static final class ImmutableEmptyMap extends AbstractMap { - - @Override - public Set> entrySet() { - return ImmutableSet.of(); - } - } - - public static final class Builder { - - private BlobId blobId; - private String id; - private String contentType; - private String contentEncoding; - private String contentDisposition; - private String contentLanguage; - private Integer componentCount; - private String cacheControl; - private List acl; - private Acl.Entity owner; - private Long size; - private String etag; - private String selfLink; - private String md5; - private String crc32c; - private String mediaLink; - private Map metadata; - private Long metageneration; - private Long deleteTime; - private Long updateTime; - - private Builder() {} - - private Builder(BlobInfo blobInfo) { - blobId = blobInfo.blobId; - id = blobInfo.id; - cacheControl = blobInfo.cacheControl; - contentEncoding = blobInfo.contentEncoding; - contentType = blobInfo.contentType; - contentDisposition = blobInfo.contentDisposition; - contentLanguage = blobInfo.contentLanguage; - componentCount = blobInfo.componentCount; - acl = blobInfo.acl; - owner = blobInfo.owner; - size = blobInfo.size; - etag = blobInfo.etag; - selfLink = blobInfo.selfLink; - md5 = blobInfo.md5; - crc32c = blobInfo.crc32c; - mediaLink = blobInfo.mediaLink; - metadata = blobInfo.metadata; - metageneration = blobInfo.metageneration; - deleteTime = blobInfo.deleteTime; - updateTime = blobInfo.updateTime; - } - - /** - * Sets the blob identity. - */ - public Builder blobId(BlobId blobId) { - this.blobId = checkNotNull(blobId); - return this; - } - - Builder id(String id) { - this.id = id; - return this; - } - - /** - * Sets the blob's data content type. - * - * @see Content-Type - */ - public Builder contentType(String contentType) { - this.contentType = firstNonNull(contentType, Data.nullOf(String.class)); - return this; - } - - /** - * Sets the blob's data content disposition. - * - * @see Content-Disposition - */ - public Builder contentDisposition(String contentDisposition) { - this.contentDisposition = firstNonNull(contentDisposition, Data.nullOf(String.class)); - return this; - } - - /** - * Sets the blob's data content language. - * - * @see Content-Language - */ - public Builder contentLanguage(String contentLanguage) { - this.contentLanguage = firstNonNull(contentLanguage, Data.nullOf(String.class)); - return this; - } - - /** - * Sets the blob's data content encoding. - * - * @see Content-Encoding - */ - public Builder contentEncoding(String contentEncoding) { - this.contentEncoding = firstNonNull(contentEncoding, Data.nullOf(String.class)); - return this; - } - - Builder componentCount(Integer componentCount) { - this.componentCount = componentCount; - return this; - } - - /** - * Sets the blob's data cache control. - * - * @see Cache-Control - */ - public Builder cacheControl(String cacheControl) { - this.cacheControl = firstNonNull(cacheControl, Data.nullOf(String.class)); - return this; - } - - /** - * Sets the blob's access control configuration. - * - * @see - * About Access Control Lists - */ - public Builder acl(List acl) { - this.acl = acl != null ? 
ImmutableList.copyOf(acl) : null; - return this; - } - - Builder owner(Acl.Entity owner) { - this.owner = owner; - return this; - } - - Builder size(Long size) { - this.size = size; - return this; - } - - Builder etag(String etag) { - this.etag = etag; - return this; - } - - Builder selfLink(String selfLink) { - this.selfLink = selfLink; - return this; - } - - /** - * Sets the MD5 hash of blob's data. MD5 value must be encoded in base64. - * - * @see - * Hashes and ETags: Best Practices - */ - public Builder md5(String md5) { - this.md5 = firstNonNull(md5, Data.nullOf(String.class)); - return this; - } - - /** - * Sets the CRC32C checksum of blob's data as described in - * RFC 4960, Appendix B; encoded in - * base64 in big-endian order. - * - * @see - * Hashes and ETags: Best Practices - */ - public Builder crc32c(String crc32c) { - this.crc32c = firstNonNull(crc32c, Data.nullOf(String.class)); - return this; - } - - Builder mediaLink(String mediaLink) { - this.mediaLink = mediaLink; - return this; - } - - /** - * Sets the blob's user provided metadata. - */ - public Builder metadata(Map metadata) { - this.metadata = metadata != null - ? new HashMap<>(metadata) : Data.>nullOf(ImmutableEmptyMap.class); - return this; - } - - Builder metageneration(Long metageneration) { - this.metageneration = metageneration; - return this; - } - - Builder deleteTime(Long deleteTime) { - this.deleteTime = deleteTime; - return this; - } - - Builder updateTime(Long updateTime) { - this.updateTime = updateTime; - return this; - } - - /** - * Creates a {@code BlobInfo} object. - */ - public BlobInfo build() { - checkNotNull(blobId); - return new BlobInfo(this); - } - } - - private BlobInfo(Builder builder) { - blobId = builder.blobId; - id = builder.id; - cacheControl = builder.cacheControl; - contentEncoding = builder.contentEncoding; - contentType = builder.contentType; - contentDisposition = builder.contentDisposition; - contentLanguage = builder.contentLanguage; - componentCount = builder.componentCount; - acl = builder.acl; - owner = builder.owner; - size = builder.size; - etag = builder.etag; - selfLink = builder.selfLink; - md5 = builder.md5; - crc32c = builder.crc32c; - mediaLink = builder.mediaLink; - metadata = builder.metadata; - metageneration = builder.metageneration; - deleteTime = builder.deleteTime; - updateTime = builder.updateTime; - } - - /** - * Returns the blob's identity. - */ - public BlobId blobId() { - return blobId; - } - - /** - * Returns the name of the containing bucket. - */ - public String bucket() { - return blobId().bucket(); - } - - /** - * Returns the blob's id. - */ - public String id() { - return id; - } - - /** - * Returns the blob's name. - */ - public String name() { - return blobId().name(); - } - - /** - * Returns the blob's data cache control. - * - * @see Cache-Control - */ - public String cacheControl() { - return Data.isNull(cacheControl) ? null : cacheControl; - } - - /** - * Returns the blob's access control configuration. - * - * @see - * About Access Control Lists - */ - public List acl() { - return acl; - } - - /** - * Returns the blob's owner. This will always be the uploader of the blob. - */ - public Acl.Entity owner() { - return owner; - } - - /** - * Returns the content length of the data in bytes. - * - * @see Content-Length - */ - public Long size() { - return size; - } - - /** - * Returns the blob's data content type. - * - * @see Content-Type - */ - public String contentType() { - return Data.isNull(contentType) ? 
null : contentType; - } - - /** - * Returns the blob's data content encoding. - * - * @see Content-Encoding - */ - public String contentEncoding() { - return Data.isNull(contentEncoding) ? null : contentEncoding; - } - - /** - * Returns the blob's data content disposition. - * - * @see Content-Disposition - */ - public String contentDisposition() { - return Data.isNull(contentDisposition) ? null : contentDisposition; - } - - /** - * Returns the blob's data content language. - * - * @see Content-Language - */ - public String contentLanguage() { - return Data.isNull(contentLanguage) ? null : contentLanguage; - } - - /** - * Returns the number of components that make up this blob. Components are accumulated through - * the {@link Storage#compose(Storage.ComposeRequest)} operation and are limited to a count of - * 1024, counting 1 for each non-composite component blob and componentCount for each composite - * component blob. This value is set only for composite blobs. - * - * @see Component Count - * Property - */ - public Integer componentCount() { - return componentCount; - } - - /** - * Returns HTTP 1.1 Entity tag for the blob. - * - * @see Entity Tags - */ - public String etag() { - return etag; - } - - /** - * Returns the URI of this blob as a string. - */ - public String selfLink() { - return selfLink; - } - - /** - * Returns the MD5 hash of blob's data encoded in base64. - * - * @see - * Hashes and ETags: Best Practices - */ - public String md5() { - return Data.isNull(md5) ? null : md5; - } - - /** - * Returns the CRC32C checksum of blob's data as described in - * RFC 4960, Appendix B; encoded in - * base64 in big-endian order. - * - * @see - * Hashes and ETags: Best Practices - */ - public String crc32c() { - return Data.isNull(crc32c) ? null : crc32c; - } - - /** - * Returns the blob's media download link. - */ - public String mediaLink() { - return mediaLink; - } - - /** - * Returns blob's user provided metadata. - */ - public Map metadata() { - return metadata == null || Data.isNull(metadata) ? null : Collections.unmodifiableMap(metadata); - } - - /** - * Returns blob's data generation. Used for blob versioning. - */ - public Long generation() { - return blobId().generation(); - } - - /** - * Returns blob's metageneration. Used for preconditions and for detecting changes in metadata. - * A metageneration number is only meaningful in the context of a particular generation of a - * particular blob. - */ - public Long metageneration() { - return metageneration; - } - - /** - * Returns the deletion time of the blob. - */ - public Long deleteTime() { - return deleteTime; - } - - /** - * Returns the last modification time of the blob's metadata. - */ - public Long updateTime() { - return updateTime; - } - - /** - * Returns a builder for the current blob. 
- */ - public Builder toBuilder() { - return new Builder(this); - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("bucket", bucket()) - .add("name", name()) - .add("generation", generation()) - .add("size", size()) - .add("content-type", contentType()) - .add("metadata", metadata()) - .toString(); - } - - @Override - public int hashCode() { - return Objects.hash(blobId); - } - - @Override - public boolean equals(Object obj) { - return obj instanceof BlobInfo && Objects.equals(toPb(), ((BlobInfo) obj).toPb()); - } - - StorageObject toPb() { - StorageObject storageObject = blobId.toPb(); - if (acl != null) { - storageObject.setAcl(Lists.transform(acl, new Function() { - @Override - public ObjectAccessControl apply(Acl acl) { - return acl.toObjectPb(); - } - })); - } - if (deleteTime != null) { - storageObject.setTimeDeleted(new DateTime(deleteTime)); - } - if (updateTime != null) { - storageObject.setUpdated(new DateTime(updateTime)); - } - if (size != null) { - storageObject.setSize(BigInteger.valueOf(size)); - } - if (owner != null) { - storageObject.setOwner(new Owner().setEntity(owner.toPb())); - } - Map pbMetadata = metadata; - if (metadata != null && !Data.isNull(metadata)) { - pbMetadata = Maps.newHashMapWithExpectedSize(metadata.size()); - for (Map.Entry entry : metadata.entrySet()) { - pbMetadata.put(entry.getKey(), - firstNonNull(entry.getValue(), Data.nullOf(String.class))); - } - } - storageObject.setMetadata(pbMetadata); - storageObject.setCacheControl(cacheControl); - storageObject.setContentEncoding(contentEncoding); - storageObject.setCrc32c(crc32c); - storageObject.setContentType(contentType); - storageObject.setMd5Hash(md5); - storageObject.setMediaLink(mediaLink); - storageObject.setMetageneration(metageneration); - storageObject.setContentDisposition(contentDisposition); - storageObject.setComponentCount(componentCount); - storageObject.setContentLanguage(contentLanguage); - storageObject.setEtag(etag); - storageObject.setId(id); - storageObject.setSelfLink(selfLink); - return storageObject; - } - - /** - * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. - */ - public static Builder builder(BucketInfo bucketInfo, String name) { - return builder(bucketInfo.name(), name); - } - - /** - * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. - */ - public static Builder builder(String bucket, String name) { - return new Builder().blobId(BlobId.of(bucket, name)); - } - - /** - * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. - */ - public static Builder builder(BucketInfo bucketInfo, String name, Long generation) { - return builder(bucketInfo.name(), name, generation); - } - - /** - * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. 
- */ - public static Builder builder(String bucket, String name, Long generation) { - return new Builder().blobId(BlobId.of(bucket, name, generation)); - } - - public static Builder builder(BlobId blobId) { - return new Builder().blobId(blobId); - } - - static BlobInfo fromPb(StorageObject storageObject) { - Builder builder = builder(BlobId.fromPb(storageObject)); - if (storageObject.getCacheControl() != null) { - builder.cacheControl(storageObject.getCacheControl()); - } - if (storageObject.getContentEncoding() != null) { - builder.contentEncoding(storageObject.getContentEncoding()); - } - if (storageObject.getCrc32c() != null) { - builder.crc32c(storageObject.getCrc32c()); - } - if (storageObject.getContentType() != null) { - builder.contentType(storageObject.getContentType()); - } - if (storageObject.getMd5Hash() != null) { - builder.md5(storageObject.getMd5Hash()); - } - if (storageObject.getMediaLink() != null) { - builder.mediaLink(storageObject.getMediaLink()); - } - if (storageObject.getMetageneration() != null) { - builder.metageneration(storageObject.getMetageneration()); - } - if (storageObject.getContentDisposition() != null) { - builder.contentDisposition(storageObject.getContentDisposition()); - } - if (storageObject.getComponentCount() != null) { - builder.componentCount(storageObject.getComponentCount()); - } - if (storageObject.getContentLanguage() != null) { - builder.contentLanguage(storageObject.getContentLanguage()); - } - if (storageObject.getEtag() != null) { - builder.etag(storageObject.getEtag()); - } - if (storageObject.getId() != null) { - builder.id(storageObject.getId()); - } - if (storageObject.getSelfLink() != null) { - builder.selfLink(storageObject.getSelfLink()); - } - if (storageObject.getMetadata() != null) { - builder.metadata(storageObject.getMetadata()); - } - if (storageObject.getTimeDeleted() != null) { - builder.deleteTime(storageObject.getTimeDeleted().getValue()); - } - if (storageObject.getUpdated() != null) { - builder.updateTime(storageObject.getUpdated().getValue()); - } - if (storageObject.getSize() != null) { - builder.size(storageObject.getSize().longValue()); - } - if (storageObject.getOwner() != null) { - builder.owner(Acl.Entity.fromPb(storageObject.getOwner().getEntity())); - } - if (storageObject.getAcl() != null) { - builder.acl(Lists.transform(storageObject.getAcl(), new Function() { - @Override - public Acl apply(ObjectAccessControl objectAccessControl) { - return Acl.fromPb(objectAccessControl); - } - })); - } - return builder.build(); - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java index d1d12ec77638..ee83f4fa8c9e 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java @@ -30,14 +30,14 @@ /** * Write channel implementation to upload Google Cloud Storage blobs. 
*/ -class BlobWriteChannel extends BaseWriteChannel { +class BlobWriteChannel extends BaseWriteChannel { - BlobWriteChannel(StorageOptions options, BlobInfo blob, Map optionsMap) { - this(options, blob, options.rpc().open(blob.toPb(), optionsMap)); + BlobWriteChannel(Blob blob, Map optionsMap) { + this(blob, blob.options().rpc().open(blob.toPb(), optionsMap)); } - BlobWriteChannel(StorageOptions options, BlobInfo blobInfo, String uploadId) { - super(options, blobInfo, uploadId); + BlobWriteChannel(Blob blob, String uploadId) { + super(blob.options(), blob, uploadId); } @Override @@ -54,11 +54,12 @@ public void run() { } } + @Override protected StateImpl.Builder stateBuilder() { - return StateImpl.builder(options(), entity(), uploadId()); + return StateImpl.builder(entity(), uploadId()); } - static class StateImpl extends BaseWriteChannel.BaseState { + static class StateImpl extends BaseWriteChannel.BaseState { private static final long serialVersionUID = -9028324143780151286L; @@ -66,10 +67,10 @@ static class StateImpl extends BaseWriteChannel.BaseState { + static class Builder extends BaseWriteChannel.BaseState.Builder { - private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - super(options, blobInfo, uploadId); + private Builder(Blob blob, String uploadId) { + super(blob.options(), blob, uploadId); } @Override @@ -78,13 +79,13 @@ public RestorableState build() { } } - static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - return new Builder(options, blobInfo, uploadId); + static Builder builder(Blob blob, String uploadId) { + return new Builder(blob, uploadId); } @Override public WriteChannel restore() { - BlobWriteChannel channel = new BlobWriteChannel(serviceOptions, entity, uploadId); + BlobWriteChannel channel = new BlobWriteChannel(entity, uploadId); channel.restore(this); return channel; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java index d0e823492ee3..99b010cdc318 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java @@ -16,17 +16,30 @@ package com.google.gcloud.storage; +import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.collect.Lists.transform; import static com.google.gcloud.storage.Bucket.BucketSourceOption.toGetOptions; import static com.google.gcloud.storage.Bucket.BucketSourceOption.toSourceOptions; +import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.util.Data; +import com.google.api.client.util.DateTime; +import com.google.api.services.storage.model.Bucket.Lifecycle; +import com.google.api.services.storage.model.Bucket.Lifecycle.Rule; +import com.google.api.services.storage.model.Bucket.Owner; +import com.google.api.services.storage.model.Bucket.Versioning; +import com.google.api.services.storage.model.Bucket.Website; +import com.google.api.services.storage.model.BucketAccessControl; +import com.google.api.services.storage.model.ObjectAccessControl; import com.google.common.base.Function; import com.google.common.base.MoreObjects; -import com.google.common.collect.Iterators; +import com.google.common.collect.ImmutableList; import com.google.gcloud.Page; -import 
com.google.gcloud.PageImpl; import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.storage.Acl.Entity; import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BlobTargetOption; import com.google.gcloud.storage.Storage.BlobWriteOption; @@ -35,11 +48,11 @@ import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -51,73 +64,282 @@ * {@link #reload}. *

*/ -public final class Bucket { +public final class Bucket implements Serializable { - private final Storage storage; - private final BucketInfo info; + private static final long serialVersionUID = -4052051461903992871L; - private static class BlobPageFetcher implements PageImpl.NextPageFetcher { + static final Function, Bucket> + FROM_PB_FUNCTION = + new Function, Bucket>() { + @Override + public Bucket apply(Tuple pb) { + return Bucket.fromPb(pb.x(), pb.y()); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public com.google.api.services.storage.model.Bucket apply(Bucket bucket) { + return bucket.toPb(); + } + }; + + private final String id; + private final String name; + private final Acl.Entity owner; + private final String selfLink; + private final Boolean versioningEnabled; + private final String indexPage; + private final String notFoundPage; + private final List deleteRules; + private final String etag; + private final Long createTime; + private final Long metageneration; + private final List cors; + private final List acl; + private final List defaultAcl; + private final String location; + private final String storageClass; + private final StorageOptions options; + private transient Storage storage; + + /** + * Base class for bucket's delete rules. Allows to configure automatic deletion of blobs and blobs + * versions. + * + * @see Object Lifecycle Management + */ + public abstract static class DeleteRule implements Serializable { + + private static final long serialVersionUID = -233553882888158723L; + + private static final String SUPPORTED_ACTION = "Delete"; + private final Type type; + + public enum Type { + AGE, CREATE_BEFORE, NUM_NEWER_VERSIONS, IS_LIVE, UNKNOWN + } + + DeleteRule(Type type) { + this.type = type; + } + + public Type type() { + return type; + } + + @Override + public int hashCode() { + return Objects.hash(type); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + final DeleteRule other = (DeleteRule) obj; + return Objects.equals(toPb(), other.toPb()); + } + + Rule toPb() { + Rule rule = new Rule(); + rule.setAction(new Rule.Action().setType(SUPPORTED_ACTION)); + Rule.Condition condition = new Rule.Condition(); + populateCondition(condition); + rule.setCondition(condition); + return rule; + } + + abstract void populateCondition(Rule.Condition condition); + + static DeleteRule fromPb(Rule rule) { + if (rule.getAction() != null && SUPPORTED_ACTION.endsWith(rule.getAction().getType())) { + Rule.Condition condition = rule.getCondition(); + Integer age = condition.getAge(); + if (age != null) { + return new AgeDeleteRule(age); + } + DateTime dateTime = condition.getCreatedBefore(); + if (dateTime != null) { + return new CreatedBeforeDeleteRule(dateTime.getValue()); + } + Integer numNewerVersions = condition.getNumNewerVersions(); + if (numNewerVersions != null) { + return new NumNewerVersionsDeleteRule(numNewerVersions); + } + Boolean isLive = condition.getIsLive(); + if (isLive != null) { + return new IsLiveDeleteRule(isLive); + } + } + return new RawDeleteRule(rule); + } + } - private static final long serialVersionUID = 3221100177471323801L; + /** + * Delete rule class that sets a Time To Live for blobs in the bucket. 
+ * + * @see Object Lifecycle Management + */ + public static class AgeDeleteRule extends DeleteRule { + + private static final long serialVersionUID = 4008399976841554647L; + + private final int daysToLive; - private final StorageOptions options; - private final Page infoPage; + /** + * Creates an {@code AgeDeleteRule} object. + * + * @param daysToLive blobs' Time To Live expressed in days. The time when the age condition is + * considered to be satisfied is computed by adding {@code daysToLive} days to the + * midnight following blob's creation time in UTC. + */ + public AgeDeleteRule(int daysToLive) { + super(Type.AGE); + this.daysToLive = daysToLive; + } - BlobPageFetcher(StorageOptions options, Page infoPage) { - this.options = options; - this.infoPage = infoPage; + public int daysToLive() { + return daysToLive; } @Override - public Page nextPage() { - Page nextInfoPage = infoPage.nextPage(); - return new PageImpl<>(new BlobPageFetcher(options, nextInfoPage), - nextInfoPage.nextPageCursor(), new LazyBlobIterable(options, nextInfoPage.values())); + void populateCondition(Rule.Condition condition) { + condition.setAge(daysToLive); } } - private static class LazyBlobIterable implements Iterable, Serializable { + static class RawDeleteRule extends DeleteRule { + + private static final long serialVersionUID = -5296731123363309473L; + + private transient Rule rule; - private static final long serialVersionUID = -3092290247725378832L; + RawDeleteRule(Rule rule) { + super(Type.UNKNOWN); + this.rule = rule; + } - private final StorageOptions options; - private final Iterable infoIterable; - private transient Storage storage; + @Override + void populateCondition(Rule.Condition condition) { + throw new UnsupportedOperationException(); + } - public LazyBlobIterable(StorageOptions options, Iterable infoIterable) { - this.options = options; - this.infoIterable = infoIterable; - this.storage = options.service(); + private void writeObject(ObjectOutputStream out) throws IOException { + out.defaultWriteObject(); + out.writeUTF(rule.toString()); } - private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + private void readObject(ObjectInputStream in) throws IOException, + ClassNotFoundException { in.defaultReadObject(); - this.storage = options.service(); + rule = new JacksonFactory().fromString(in.readUTF(), Rule.class); } @Override - public Iterator iterator() { - return Iterators.transform(infoIterable.iterator(), new Function() { - @Override - public Blob apply(BlobInfo blobInfo) { - return new Blob(storage, blobInfo); - } - }); + Rule toPb() { + return rule; + } + } + + /** + * Delete rule class for blobs in the bucket that have been created before a certain date. + * + * @see Object Lifecycle Management + */ + public static class CreatedBeforeDeleteRule extends DeleteRule { + + private static final long serialVersionUID = -2821235292289446045L; + + private final long timeMillis; + + /** + * Creates an {@code CreatedBeforeDeleteRule} object. + * + * @param timeMillis a date in UTC. 
Blobs that have been created before midnight of the provided + * date meet the delete condition + */ + public CreatedBeforeDeleteRule(long timeMillis) { + super(Type.CREATE_BEFORE); + this.timeMillis = timeMillis; + } + + public long timeMillis() { + return timeMillis; } @Override - public int hashCode() { - return Objects.hash(options, infoIterable); + void populateCondition(Rule.Condition condition) { + condition.setCreatedBefore(new DateTime(timeMillis)); + } + } + + /** + * Delete rule class for versioned blobs. Specifies when to delete a blob's version according to + * the number of available newer versions for that blob. + * + * @see Object Lifecycle Management + */ + public static class NumNewerVersionsDeleteRule extends DeleteRule { + + private static final long serialVersionUID = -5801983633758098154L; + + private final int numNewerVersions; + + /** + * Creates an {@code NumNewerVersionsDeleteRule} object. + * + * @param numNewerVersions the number of newer versions. A blob's version meets the delete + * condition when {@code numNewerVersions} newer versions are available. + */ + public NumNewerVersionsDeleteRule(int numNewerVersions) { + super(Type.NUM_NEWER_VERSIONS); + this.numNewerVersions = numNewerVersions; + } + + public int numNewerVersions() { + return numNewerVersions; } @Override - public boolean equals(Object obj) { - if (!(obj instanceof LazyBlobIterable)) { - return false; - } - LazyBlobIterable other = (LazyBlobIterable) obj; - return Objects.equals(options, other.options) - && Objects.equals(infoIterable, other.infoIterable); + void populateCondition(Rule.Condition condition) { + condition.setNumNewerVersions(numNewerVersions); + } + } + + /** + * Delete rule class to distinguish between live and archived blobs. + * + * @see Object Lifecycle Management + */ + public static class IsLiveDeleteRule extends DeleteRule { + + private static final long serialVersionUID = 6556002405077879041L; + + private final boolean isLive; + + /** + * Creates an {@code IsLiveDeleteRule} object. + * + * @param isLive if set to {@code true} live blobs meet the delete condition. If set to + * {@code false} delete condition is met by archived blobs. 
+ */ + public IsLiveDeleteRule(boolean isLive) { + super(Type.IS_LIVE); + this.isLive = isLive; + } + + public boolean isLive() { + return isLive; + } + + @Override + void populateCondition(Rule.Condition condition) { + condition.setIsLive(isLive); } } @@ -126,29 +348,29 @@ public boolean equals(Object obj) { */ public static class BucketSourceOption extends Option { - private static final long serialVersionUID = 6928872234155522371L; + private static final long serialVersionUID = 2256447327846232412L; private BucketSourceOption(StorageRpc.Option rpcOption) { super(rpcOption, null); } - private Storage.BucketSourceOption toSourceOptions(BucketInfo bucketInfo) { + private Storage.BucketSourceOption toSourceOptions(Bucket bucket) { switch (rpcOption()) { case IF_METAGENERATION_MATCH: - return Storage.BucketSourceOption.metagenerationMatch(bucketInfo.metageneration()); + return Storage.BucketSourceOption.metagenerationMatch(bucket.metageneration()); case IF_METAGENERATION_NOT_MATCH: - return Storage.BucketSourceOption.metagenerationNotMatch(bucketInfo.metageneration()); + return Storage.BucketSourceOption.metagenerationNotMatch(bucket.metageneration()); default: throw new AssertionError("Unexpected enum value"); } } - private Storage.BucketGetOption toGetOption(BucketInfo bucketInfo) { + private Storage.BucketGetOption toGetOption(Bucket bucket) { switch (rpcOption()) { case IF_METAGENERATION_MATCH: - return Storage.BucketGetOption.metagenerationMatch(bucketInfo.metageneration()); + return Storage.BucketGetOption.metagenerationMatch(bucket.metageneration()); case IF_METAGENERATION_NOT_MATCH: - return Storage.BucketGetOption.metagenerationNotMatch(bucketInfo.metageneration()); + return Storage.BucketGetOption.metagenerationNotMatch(bucket.metageneration()); default: throw new AssertionError("Unexpected enum value"); } @@ -170,44 +392,555 @@ public static BucketSourceOption metagenerationNotMatch() { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } - static Storage.BucketSourceOption[] toSourceOptions(BucketInfo bucketInfo, - BucketSourceOption... options) { + static Storage.BucketSourceOption[] toSourceOptions( + Bucket bucket, BucketSourceOption... options) { Storage.BucketSourceOption[] convertedOptions = new Storage.BucketSourceOption[options.length]; int index = 0; for (BucketSourceOption option : options) { - convertedOptions[index++] = option.toSourceOptions(bucketInfo); + convertedOptions[index++] = option.toSourceOptions(bucket); } return convertedOptions; } - static Storage.BucketGetOption[] toGetOptions(BucketInfo bucketInfo, - BucketSourceOption... options) { + static Storage.BucketGetOption[] toGetOptions(Bucket bucket, BucketSourceOption... 
options) { Storage.BucketGetOption[] convertedOptions = new Storage.BucketGetOption[options.length]; int index = 0; for (BucketSourceOption option : options) { - convertedOptions[index++] = option.toGetOption(bucketInfo); + convertedOptions[index++] = option.toGetOption(bucket); } return convertedOptions; } } + public static final class Builder { + + private String id; + private String name; + private Acl.Entity owner; + private String selfLink; + private Boolean versioningEnabled; + private String indexPage; + private String notFoundPage; + private List deleteRules; + private String storageClass; + private String location; + private String etag; + private Long createTime; + private Long metageneration; + private List cors; + private List acl; + private List defaultAcl; + private Storage storage; + + private Builder() {} + + private Builder(Bucket bucket) { + id = bucket.id; + name = bucket.name; + etag = bucket.etag; + createTime = bucket.createTime; + metageneration = bucket.metageneration; + location = bucket.location; + storageClass = bucket.storageClass; + cors = bucket.cors; + acl = bucket.acl; + defaultAcl = bucket.defaultAcl; + owner = bucket.owner; + selfLink = bucket.selfLink; + versioningEnabled = bucket.versioningEnabled; + indexPage = bucket.indexPage; + notFoundPage = bucket.notFoundPage; + deleteRules = bucket.deleteRules; + storage = bucket.storage; + } + + /** + * Sets the bucket's name. + */ + public Builder name(String name) { + this.name = checkNotNull(name); + return this; + } + + Builder id(String id) { + this.id = id; + return this; + } + + Builder owner(Acl.Entity owner) { + this.owner = owner; + return this; + } + + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + /** + * Sets whether versioning should be enabled for this bucket. When set to true, versioning is + * fully enabled. + */ + public Builder versioningEnabled(Boolean enable) { + this.versioningEnabled = firstNonNull(enable, Data.nullOf(Boolean.class)); + return this; + } + + /** + * Sets the bucket's website index page. Behaves as the bucket's directory index where missing + * blobs are treated as potential directories. + */ + public Builder indexPage(String indexPage) { + this.indexPage = indexPage; + return this; + } + + /** + * Sets the custom object to return when a requested resource is not found. + */ + public Builder notFoundPage(String notFoundPage) { + this.notFoundPage = notFoundPage; + return this; + } + + /** + * Sets the bucket's lifecycle configuration as a number of delete rules. + * + * @see Lifecycle Management + */ + public Builder deleteRules(Iterable rules) { + this.deleteRules = rules != null ? ImmutableList.copyOf(rules) : null; + return this; + } + + /** + * Sets the bucket's storage class. This defines how blobs in the bucket are stored and + * determines the SLA and the cost of storage. A list of supported values is available + * here. + */ + public Builder storageClass(String storageClass) { + this.storageClass = storageClass; + return this; + } + + /** + * Sets the bucket's location. Data for blobs in the bucket resides in physical storage within + * this region. A list of supported values is available + * here. 
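For illustration, a configuration sketch using the Builder setters documented above, assuming the merged Bucket API; the name, location and storage class values are hypothetical examples:

import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Storage;

public class BucketBuilderSketch {
  static Bucket websiteBucket(Storage storage) {
    // Configure a versioned, website-style bucket; build() assembles the object only,
    // it does not issue the create RPC.
    return Bucket.builder(storage, "my-site-bucket")
        .versioningEnabled(true)
        .indexPage("index.html")
        .notFoundPage("404.html")
        .location("US")
        .storageClass("STANDARD")
        .build();
  }
}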
+ */ + public Builder location(String location) { + this.location = location; + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + Builder createTime(Long createTime) { + this.createTime = createTime; + return this; + } + + Builder metageneration(Long metageneration) { + this.metageneration = metageneration; + return this; + } + + /** + * Sets the bucket's Cross-Origin Resource Sharing (CORS) configuration. + * + * @see + * Cross-Origin Resource Sharing (CORS) + */ + public Builder cors(Iterable cors) { + this.cors = cors != null ? ImmutableList.copyOf(cors) : null; + return this; + } + + /** + * Sets the bucket's access control configuration. + * + * @see + * About Access Control Lists + */ + public Builder acl(Iterable acl) { + this.acl = acl != null ? ImmutableList.copyOf(acl) : null; + return this; + } + + /** + * Sets the default access control configuration to apply to bucket's blobs when no other + * configuration is specified. + * + * @see + * About Access Control Lists + */ + public Builder defaultAcl(Iterable acl) { + this.defaultAcl = acl != null ? ImmutableList.copyOf(acl) : null; + return this; + } + + /** + * Sets the storage service object used to send requests relevant to the bucket. + */ + public Builder storage(Storage storage) { + this.storage = storage; + return this; + } + + /** + * Creates a {@code Bucket} object. + */ + public Bucket build() { + checkNotNull(name); + checkNotNull(storage); + return new Bucket(this); + } + } + + private Bucket(Builder builder) { + id = builder.id; + name = builder.name; + etag = builder.etag; + createTime = builder.createTime; + metageneration = builder.metageneration; + location = builder.location; + storageClass = builder.storageClass; + cors = builder.cors; + acl = builder.acl; + defaultAcl = builder.defaultAcl; + owner = builder.owner; + selfLink = builder.selfLink; + versioningEnabled = builder.versioningEnabled; + indexPage = builder.indexPage; + notFoundPage = builder.notFoundPage; + deleteRules = builder.deleteRules; + options = builder.storage.options(); + storage = builder.storage; + } + /** - * Constructs a {@code Bucket} object for the provided {@code BucketInfo}. The storage service is - * used to issue requests. - * - * @param storage the storage service used for issuing requests - * @param info bucket's info + * Returns the bucket's id. + */ + public String id() { + return id; + } + + /** + * Returns the bucket's name. + */ + public String name() { + return name; + } + + /** + * Returns the bucket's owner. This is always the project team's owner group. + */ + public Entity owner() { + return owner; + } + + /** + * Returns the URI of this bucket as a string. + */ + public String selfLink() { + return selfLink; + } + + /** + * Returns {@code true} if versioning is fully enabled for this bucket, {@code false} otherwise. + */ + public Boolean versioningEnabled() { + return Data.isNull(versioningEnabled) ? null : versioningEnabled; + } + + /** + * Returns bucket's website index page. Behaves as the bucket's directory index where missing + * blobs are treated as potential directories. + */ + public String indexPage() { + return indexPage; + } + + /** + * Returns the custom object to return when a requested resource is not found. + */ + public String notFoundPage() { + return notFoundPage; + } + + /** + * Returns bucket's lifecycle configuration as a number of delete rules. 
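For illustration, a lifecycle sketch combining the delete rules defined above, assuming the merged Bucket API; the bucket name and thresholds are hypothetical:

import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Bucket.AgeDeleteRule;
import com.google.gcloud.storage.Bucket.DeleteRule;
import com.google.gcloud.storage.Bucket.NumNewerVersionsDeleteRule;
import com.google.gcloud.storage.Storage;

import java.util.Arrays;
import java.util.List;

public class BucketLifecycleSketch {
  static Bucket withLifecycle(Storage storage) {
    // Delete blobs 30 days after creation, and delete a version once 5 newer versions exist.
    List<DeleteRule> rules = Arrays.<DeleteRule>asList(
        new AgeDeleteRule(30),
        new NumNewerVersionsDeleteRule(5));
    return Bucket.builder(storage, "my-archive-bucket")
        .deleteRules(rules)
        .build();
  }
}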
+ * + * @see Lifecycle Management + */ + public List deleteRules() { + return deleteRules; + } + + /** + * Returns HTTP 1.1 Entity tag for the bucket. + * + * @see Entity Tags + */ + public String etag() { + return etag; + } + + /** + * Returns the time at which the bucket was created. + */ + public Long createTime() { + return createTime; + } + + /** + * Returns the metadata generation of this bucket. + */ + public Long metageneration() { + return metageneration; + } + + /** + * Returns the bucket's location. Data for blobs in the bucket resides in physical storage within + * this region. + * + * @see Bucket Locations + */ + public String location() { + return location; + } + + /** + * Returns the bucket's storage class. This defines how blobs in the bucket are stored and + * determines the SLA and the cost of storage. + * + * @see Storage Classes + */ + public String storageClass() { + return storageClass; + } + + /** + * Returns the bucket's Cross-Origin Resource Sharing (CORS) configuration. + * + * @see + * Cross-Origin Resource Sharing (CORS) + */ + public List cors() { + return cors; + } + + /** + * Returns the bucket's access control configuration. + * + * @see + * About Access Control Lists + */ + public List acl() { + return acl; + } + + /** + * Returns the default access control configuration for this bucket's blobs. + * + * @see + * About Access Control Lists + */ + public List defaultAcl() { + return defaultAcl; + } + + /** + * Returns a builder for the current bucket. + */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Bucket && Objects.equals(toPb(), ((Bucket) obj).toPb()) + && Objects.equals(options, ((Bucket) obj).options); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", name()) + .toString(); + } + + com.google.api.services.storage.model.Bucket toPb() { + com.google.api.services.storage.model.Bucket bucketPb = + new com.google.api.services.storage.model.Bucket(); + bucketPb.setId(id); + bucketPb.setName(name); + bucketPb.setEtag(etag); + if (createTime != null) { + bucketPb.setTimeCreated(new DateTime(createTime)); + } + if (metageneration != null) { + bucketPb.setMetageneration(metageneration); + } + if (location != null) { + bucketPb.setLocation(location); + } + if (storageClass != null) { + bucketPb.setStorageClass(storageClass); + } + if (cors != null) { + bucketPb.setCors(transform(cors, Cors.TO_PB_FUNCTION)); + } + if (acl != null) { + bucketPb.setAcl(transform(acl, new Function() { + @Override + public BucketAccessControl apply(Acl acl) { + return acl.toBucketPb(); + } + })); + } + if (defaultAcl != null) { + bucketPb.setDefaultObjectAcl(transform(defaultAcl, new Function() { + @Override + public ObjectAccessControl apply(Acl acl) { + return acl.toObjectPb(); + } + })); + } + if (owner != null) { + bucketPb.setOwner(new Owner().setEntity(owner.toPb())); + } + bucketPb.setSelfLink(selfLink); + if (versioningEnabled != null) { + bucketPb.setVersioning(new Versioning().setEnabled(versioningEnabled)); + } + if (indexPage != null || notFoundPage != null) { + Website website = new Website(); + website.setMainPageSuffix(indexPage); + website.setNotFoundPage(notFoundPage); + bucketPb.setWebsite(website); + } + if (deleteRules != null) { + Lifecycle lifecycle = new Lifecycle(); + lifecycle.setRule(transform(deleteRules, new Function() { + @Override + public 
Rule apply(DeleteRule deleteRule) { + return deleteRule.toPb(); + } + })); + bucketPb.setLifecycle(lifecycle); + } + return bucketPb; + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.storage = options.service(); + } + + /** + * Creates a {@code Bucket} object for the provided bucket name. */ - public Bucket(Storage storage, BucketInfo info) { - this.storage = checkNotNull(storage); - this.info = checkNotNull(info); + public static Bucket of(Storage storage, String name) { + return builder(storage, name).build(); + } + + /** + * Returns a {@code Bucket} builder where the bucket's name is set to the provided name. + */ + public static Builder builder(Storage storage, String name) { + return new Builder().name(name).storage(storage); + } + + static Bucket fromPb(Storage storage, com.google.api.services.storage.model.Bucket bucketPb) { + Builder builder = new Builder().name(bucketPb.getName()).storage(storage); + if (bucketPb.getId() != null) { + builder.id(bucketPb.getId()); + } + if (bucketPb.getEtag() != null) { + builder.etag(bucketPb.getEtag()); + } + if (bucketPb.getMetageneration() != null) { + builder.metageneration(bucketPb.getMetageneration()); + } + if (bucketPb.getSelfLink() != null) { + builder.selfLink(bucketPb.getSelfLink()); + } + if (bucketPb.getTimeCreated() != null) { + builder.createTime(bucketPb.getTimeCreated().getValue()); + } + if (bucketPb.getLocation() != null) { + builder.location(bucketPb.getLocation()); + } + if (bucketPb.getStorageClass() != null) { + builder.storageClass(bucketPb.getStorageClass()); + } + if (bucketPb.getCors() != null) { + builder.cors(transform(bucketPb.getCors(), Cors.FROM_PB_FUNCTION)); + } + if (bucketPb.getAcl() != null) { + builder.acl(transform(bucketPb.getAcl(), new Function() { + @Override + public Acl apply(BucketAccessControl bucketAccessControl) { + return Acl.fromPb(bucketAccessControl); + } + })); + } + if (bucketPb.getDefaultObjectAcl() != null) { + builder.defaultAcl(transform(bucketPb.getDefaultObjectAcl(), + new Function() { + @Override + public Acl apply(ObjectAccessControl objectAccessControl) { + return Acl.fromPb(objectAccessControl); + } + })); + } + if (bucketPb.getOwner() != null) { + builder.owner(Entity.fromPb(bucketPb.getOwner().getEntity())); + } + if (bucketPb.getVersioning() != null) { + builder.versioningEnabled(bucketPb.getVersioning().getEnabled()); + } + Website website = bucketPb.getWebsite(); + if (website != null) { + builder.indexPage(website.getMainPageSuffix()); + builder.notFoundPage(website.getNotFoundPage()); + } + if (bucketPb.getLifecycle() != null && bucketPb.getLifecycle().getRule() != null) { + builder.deleteRules(transform(bucketPb.getLifecycle().getRule(), + new Function() { + @Override + public DeleteRule apply(Rule rule) { + return DeleteRule.fromPb(rule); + } + })); + } + return builder.build(); + } + + /** + * Performs an RPC to create this bucket. + * + * @return the {@code Bucket} object representing the newly-created bucket. + * @throws StorageException upon failure + */ + public Bucket create(BucketTargetOption... options) { + return storage.create(this, options); } /** * Creates a {@code Bucket} object for the provided bucket name. Performs an RPC call to get the * latest bucket information. 
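For illustration, a minimal create-then-reload sketch for the factory and RPC methods above, assuming the merged Bucket API; the bucket name is a hypothetical placeholder:

import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Storage;

public class BucketCreateSketch {
  static Bucket createAndReload(Storage storage) {
    // create() issues the RPC and returns the bucket with server-populated metadata.
    Bucket bucket = Bucket.of(storage, "my-new-bucket").create();
    // reload() fetches the latest metadata for an existing bucket.
    return bucket.reload();
  }
}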
- * + * * @param storage the storage service used for issuing requests * @param bucket bucket's name * @param options blob get options @@ -215,15 +948,7 @@ public Bucket(Storage storage, BucketInfo info) { * @throws StorageException upon failure */ public static Bucket get(Storage storage, String bucket, Storage.BucketGetOption... options) { - BucketInfo info = storage.get(bucket, options); - return info != null ? new Bucket(storage, info) : null; - } - - /** - * Returns the bucket's information. - */ - public BucketInfo info() { - return info; + return storage.get(bucket, options); } /** @@ -234,9 +959,9 @@ public BucketInfo info() { */ public boolean exists(BucketSourceOption... options) { int length = options.length; - Storage.BucketGetOption[] getOptions = Arrays.copyOf(toGetOptions(info, options), length + 1); + Storage.BucketGetOption[] getOptions = Arrays.copyOf(toGetOptions(this, options), length + 1); getOptions[length] = Storage.BucketGetOption.fields(); - return storage.get(info.name(), getOptions) != null; + return storage.get(name(), getOptions) != null; } /** @@ -247,7 +972,7 @@ public boolean exists(BucketSourceOption... options) { * @throws StorageException upon failure */ public Bucket reload(BucketSourceOption... options) { - return Bucket.get(storage, info.name(), toGetOptions(info, options)); + return Bucket.get(storage, name(), toGetOptions(this, options)); } /** @@ -255,16 +980,16 @@ public Bucket reload(BucketSourceOption... options) { * is returned. By default no checks are made on the metadata generation of the current bucket. * If you want to update the information only if the current bucket metadata are at their latest * version use the {@code metagenerationMatch} option: - * {@code bucket.update(newInfo, BucketTargetOption.metagenerationMatch())} + * {@code bucket.update(newBucket, BucketTargetOption.metagenerationMatch())} * - * @param bucketInfo new bucket's information. Name must match the one of the current bucket + * @param bucket the new bucket. Name must match the one of the current bucket * @param options update options * @return a {@code Bucket} object with updated information * @throws StorageException upon failure */ - public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { - checkArgument(Objects.equals(bucketInfo.name(), info.name()), "Bucket name must match"); - return new Bucket(storage, storage.update(bucketInfo, options)); + public Bucket update(Bucket bucket, BucketTargetOption... options) { + checkArgument(Objects.equals(bucket.name(), name()), "Bucket name must match"); + return storage.update(bucket, options); } /** @@ -275,36 +1000,33 @@ public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { * @throws StorageException upon failure */ public boolean delete(BucketSourceOption... options) { - return storage.delete(info.name(), toSourceOptions(info, options)); + return storage.delete(name(), toSourceOptions(this, options)); } /** * Returns the paginated list of {@code Blob} in this bucket. - * + * * @param options options for listing blobs * @throws StorageException upon failure */ public Page list(Storage.BlobListOption... 
options) { - Page infoPage = storage.list(info.name(), options); - StorageOptions storageOptions = storage.options(); - return new PageImpl<>(new BlobPageFetcher(storageOptions, infoPage), infoPage.nextPageCursor(), - new LazyBlobIterable(storageOptions, infoPage.values())); + return storage.list(name, options); } /** * Returns the requested blob in this bucket or {@code null} if not found. - * + * * @param blob name of the requested blob * @param options blob search options * @throws StorageException upon failure */ public Blob get(String blob, BlobGetOption... options) { - return new Blob(storage, storage.get(BlobId.of(info.name(), blob), options)); + return storage.get(BlobId.of(name, blob), options); } /** * Returns a list of requested blobs in this bucket. Blobs that do not exist are null. - * + * * @param blobName1 first blob to get * @param blobName2 second blob to get * @param blobNames other blobs to get @@ -313,16 +1035,15 @@ public Blob get(String blob, BlobGetOption... options) { */ public List get(String blobName1, String blobName2, String... blobNames) { BatchRequest.Builder batch = BatchRequest.builder(); - batch.get(info.name(), blobName1); - batch.get(info.name(), blobName2); - for (String name : blobNames) { - batch.get(info.name(), name); + batch.get(name, blobName1); + batch.get(name, blobName2); + for (String blobName : blobNames) { + batch.get(name, blobName); } List blobs = new ArrayList<>(blobNames.length); BatchResponse response = storage.apply(batch.build()); - for (BatchResponse.Result result : response.gets()) { - BlobInfo blobInfo = result.get(); - blobs.add(blobInfo != null ? new Blob(storage, blobInfo) : null); + for (BatchResponse.Result result : response.gets()) { + blobs.add(result.get()); } return Collections.unmodifiableList(blobs); } @@ -332,39 +1053,43 @@ public List get(String blobName1, String blobName2, String... blobNames) { * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. MD5 and CRC32C hashes of {@code content} are * computed and used for validating transferred data. - * - * @param blob a blob name + * + * @param blobName a blob name * @param content the blob content * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation - * @return a complete blob information + * @return a complete blob * @throws StorageException upon failure */ - public Blob create(String blob, byte[] content, String contentType, BlobTargetOption... options) { - BlobInfo blobInfo = BlobInfo.builder(BlobId.of(info.name(), blob)) + public Blob create( + String blobName, byte[] content, String contentType, BlobTargetOption... options) { + Blob blob = + Blob.builder(storage, BlobId.of(name, blobName)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); - return new Blob(storage, storage.create(blobInfo, content, options)); + return storage.create(blob, content, options); } /** * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. - * - * @param blob a blob name + * + * @param blobName a blob name * @param content the blob content as a stream * @param contentType the blob content type. 
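For illustration, a sketch of the bucket-level blob helpers above (listing and direct creation), assuming the merged Bucket and Blob API; names and content are hypothetical:

import com.google.gcloud.Page;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Bucket;

import java.nio.charset.StandardCharsets;

public class BucketBlobsSketch {
  static void listAndCreate(Bucket bucket) {
    // Page through the blobs currently stored in this bucket.
    Page<Blob> blobs = bucket.list();
    for (Blob blob : blobs.values()) {
      System.out.println(blob.name());
    }
    // Upload a small text blob directly into the bucket.
    bucket.create("notes/hello.txt", "hello".getBytes(StandardCharsets.UTF_8), "text/plain");
  }
}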
If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation - * @return a complete blob information + * @return a complete blob * @throws StorageException upon failure */ - public Blob create(String blob, InputStream content, String contentType, + public Blob create( + String blobName, InputStream content, String contentType, BlobWriteOption... options) { - BlobInfo blobInfo = BlobInfo.builder(BlobId.of(info.name(), blob)) + Blob blob = + Blob.builder(storage, BlobId.of(name, blobName)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); - return new Blob(storage, storage.create(blobInfo, content, options)); + return storage.create(blob, content, options); } /** diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java deleted file mode 100644 index 62fbf9c6521f..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java +++ /dev/null @@ -1,814 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.api.client.repackaged.com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.MoreObjects.firstNonNull; -import static com.google.common.collect.Lists.transform; - -import com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.client.util.Data; -import com.google.api.client.util.DateTime; -import com.google.api.services.storage.model.Bucket.Lifecycle; -import com.google.api.services.storage.model.Bucket.Lifecycle.Rule; -import com.google.api.services.storage.model.Bucket.Owner; -import com.google.api.services.storage.model.Bucket.Versioning; -import com.google.api.services.storage.model.Bucket.Website; -import com.google.api.services.storage.model.BucketAccessControl; -import com.google.api.services.storage.model.ObjectAccessControl; -import com.google.common.base.Function; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; -import com.google.gcloud.storage.Acl.Entity; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.util.List; -import java.util.Objects; - -/** - * Google Storage bucket metadata; - * - * @see Concepts and - * Terminology - */ -public final class BucketInfo implements Serializable { - - static final Function FROM_PB_FUNCTION = - new Function() { - @Override - public BucketInfo apply(com.google.api.services.storage.model.Bucket pb) { - return BucketInfo.fromPb(pb); - } - }; - static final Function TO_PB_FUNCTION = - new Function() { - @Override - public com.google.api.services.storage.model.Bucket apply(BucketInfo bucketInfo) { - return bucketInfo.toPb(); - } - }; - private static final long 
serialVersionUID = -3946094202176916586L; - private final String id; - private final String name; - private final Acl.Entity owner; - private final String selfLink; - private final Boolean versioningEnabled; - private final String indexPage; - private final String notFoundPage; - private final List deleteRules; - private final String etag; - private final Long createTime; - private final Long metageneration; - private final List cors; - private final List acl; - private final List defaultAcl; - private final String location; - private final String storageClass; - - /** - * Base class for bucket's delete rules. Allows to configure automatic deletion of blobs and blobs - * versions. - * - * @see Object Lifecycle Management - */ - public abstract static class DeleteRule implements Serializable { - - private static final long serialVersionUID = 3137971668395933033L; - private static final String SUPPORTED_ACTION = "Delete"; - private final Type type; - - public enum Type { - AGE, CREATE_BEFORE, NUM_NEWER_VERSIONS, IS_LIVE, UNKNOWN - } - - DeleteRule(Type type) { - this.type = type; - } - - public Type type() { - return type; - } - - @Override - public int hashCode() { - return Objects.hash(type); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - final DeleteRule other = (DeleteRule) obj; - return Objects.equals(toPb(), other.toPb()); - } - - Rule toPb() { - Rule rule = new Rule(); - rule.setAction(new Rule.Action().setType(SUPPORTED_ACTION)); - Rule.Condition condition = new Rule.Condition(); - populateCondition(condition); - rule.setCondition(condition); - return rule; - } - - abstract void populateCondition(Rule.Condition condition); - - static DeleteRule fromPb(Rule rule) { - if (rule.getAction() != null && SUPPORTED_ACTION.endsWith(rule.getAction().getType())) { - Rule.Condition condition = rule.getCondition(); - Integer age = condition.getAge(); - if (age != null) { - return new AgeDeleteRule(age); - } - DateTime dateTime = condition.getCreatedBefore(); - if (dateTime != null) { - return new CreatedBeforeDeleteRule(dateTime.getValue()); - } - Integer numNewerVersions = condition.getNumNewerVersions(); - if (numNewerVersions != null) { - return new NumNewerVersionsDeleteRule(numNewerVersions); - } - Boolean isLive = condition.getIsLive(); - if (isLive != null) { - return new IsLiveDeleteRule(isLive); - } - } - return new RawDeleteRule(rule); - } - } - - /** - * Delete rule class that sets a Time To Live for blobs in the bucket. - * - * @see Object Lifecycle Management - */ - public static class AgeDeleteRule extends DeleteRule { - - private static final long serialVersionUID = 5697166940712116380L; - private final int daysToLive; - - /** - * Creates an {@code AgeDeleteRule} object. - * - * @param daysToLive blobs' Time To Live expressed in days. The time when the age condition is - * considered to be satisfied is computed by adding {@code daysToLive} days to the - * midnight following blob's creation time in UTC. 
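The delete rules documented above configure a bucket's lifecycle. A minimal sketch of putting them together, assuming the rule classes (AgeDeleteRule, NumNewerVersionsDeleteRule) and the deleteRules(...) builder method are carried over unchanged from the removed BucketInfo to the merged Bucket, and that Bucket.builder(storage, name) mirrors the Blob.builder(storage, ...) factory used elsewhere in this patch; the bucket name is illustrative.

import com.google.common.collect.ImmutableList;
import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Storage;

public class LifecycleSketch {
  // Sketch only, under the assumptions stated above.
  static Bucket bucketWithLifecycle(Storage storage) {
    return storage.create(
        Bucket.builder(storage, "my_unique_bucket")
            .deleteRules(ImmutableList.of(
                new Bucket.AgeDeleteRule(30),               // delete blobs 30 days after creation
                new Bucket.NumNewerVersionsDeleteRule(3)))  // delete a version once 3 newer ones exist
            .build());
  }
}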
- */ - public AgeDeleteRule(int daysToLive) { - super(Type.AGE); - this.daysToLive = daysToLive; - } - - public int daysToLive() { - return daysToLive; - } - - @Override - void populateCondition(Rule.Condition condition) { - condition.setAge(daysToLive); - } - } - - static class RawDeleteRule extends DeleteRule { - - private static final long serialVersionUID = -7166938278642301933L; - - private transient Rule rule; - - RawDeleteRule(Rule rule) { - super(Type.UNKNOWN); - this.rule = rule; - } - - @Override - void populateCondition(Rule.Condition condition) { - throw new UnsupportedOperationException(); - } - - private void writeObject(ObjectOutputStream out) throws IOException { - out.defaultWriteObject(); - out.writeUTF(rule.toString()); - } - - private void readObject(ObjectInputStream in) throws IOException, - ClassNotFoundException { - in.defaultReadObject(); - rule = new JacksonFactory().fromString(in.readUTF(), Rule.class); - } - - @Override - Rule toPb() { - return rule; - } - } - - /** - * Delete rule class for blobs in the bucket that have been created before a certain date. - * - * @see Object Lifecycle Management - */ - public static class CreatedBeforeDeleteRule extends DeleteRule { - - private static final long serialVersionUID = 881692650279195867L; - private final long timeMillis; - - /** - * Creates an {@code CreatedBeforeDeleteRule} object. - * - * @param timeMillis a date in UTC. Blobs that have been created before midnight of the provided - * date meet the delete condition - */ - public CreatedBeforeDeleteRule(long timeMillis) { - super(Type.CREATE_BEFORE); - this.timeMillis = timeMillis; - } - - public long timeMillis() { - return timeMillis; - } - - @Override - void populateCondition(Rule.Condition condition) { - condition.setCreatedBefore(new DateTime(timeMillis)); - } - } - - /** - * Delete rule class for versioned blobs. Specifies when to delete a blob's version according to - * the number of available newer versions for that blob. - * - * @see Object Lifecycle Management - */ - public static class NumNewerVersionsDeleteRule extends DeleteRule { - - private static final long serialVersionUID = -1955554976528303894L; - private final int numNewerVersions; - - /** - * Creates an {@code NumNewerVersionsDeleteRule} object. - * - * @param numNewerVersions the number of newer versions. A blob's version meets the delete - * condition when {@code numNewerVersions} newer versions are available. - */ - public NumNewerVersionsDeleteRule(int numNewerVersions) { - super(Type.NUM_NEWER_VERSIONS); - this.numNewerVersions = numNewerVersions; - } - - public int numNewerVersions() { - return numNewerVersions; - } - - @Override - void populateCondition(Rule.Condition condition) { - condition.setNumNewerVersions(numNewerVersions); - } - } - - /** - * Delete rule class to distinguish between live and archived blobs. - * - * @see Object Lifecycle Management - */ - public static class IsLiveDeleteRule extends DeleteRule { - - private static final long serialVersionUID = -3502994563121313364L; - private final boolean isLive; - - /** - * Creates an {@code IsLiveDeleteRule} object. - * - * @param isLive if set to {@code true} live blobs meet the delete condition. If set to - * {@code false} delete condition is met by archived blobs. 
- */ - public IsLiveDeleteRule(boolean isLive) { - super(Type.IS_LIVE); - this.isLive = isLive; - } - - public boolean isLive() { - return isLive; - } - - @Override - void populateCondition(Rule.Condition condition) { - condition.setIsLive(isLive); - } - } - - public static final class Builder { - - private String id; - private String name; - private Acl.Entity owner; - private String selfLink; - private Boolean versioningEnabled; - private String indexPage; - private String notFoundPage; - private List deleteRules; - private String storageClass; - private String location; - private String etag; - private Long createTime; - private Long metageneration; - private List cors; - private List acl; - private List defaultAcl; - - private Builder() {} - - private Builder(BucketInfo bucketInfo) { - id = bucketInfo.id; - name = bucketInfo.name; - etag = bucketInfo.etag; - createTime = bucketInfo.createTime; - metageneration = bucketInfo.metageneration; - location = bucketInfo.location; - storageClass = bucketInfo.storageClass; - cors = bucketInfo.cors; - acl = bucketInfo.acl; - defaultAcl = bucketInfo.defaultAcl; - owner = bucketInfo.owner; - selfLink = bucketInfo.selfLink; - versioningEnabled = bucketInfo.versioningEnabled; - indexPage = bucketInfo.indexPage; - notFoundPage = bucketInfo.notFoundPage; - deleteRules = bucketInfo.deleteRules; - } - - /** - * Sets the bucket's name. - */ - public Builder name(String name) { - this.name = checkNotNull(name); - return this; - } - - Builder id(String id) { - this.id = id; - return this; - } - - Builder owner(Acl.Entity owner) { - this.owner = owner; - return this; - } - - Builder selfLink(String selfLink) { - this.selfLink = selfLink; - return this; - } - - /** - * Sets whether versioning should be enabled for this bucket. When set to true, versioning is - * fully enabled. - */ - public Builder versioningEnabled(Boolean enable) { - this.versioningEnabled = firstNonNull(enable, Data.nullOf(Boolean.class)); - return this; - } - - /** - * Sets the bucket's website index page. Behaves as the bucket's directory index where missing - * blobs are treated as potential directories. - */ - public Builder indexPage(String indexPage) { - this.indexPage = indexPage; - return this; - } - - /** - * Sets the custom object to return when a requested resource is not found. - */ - public Builder notFoundPage(String notFoundPage) { - this.notFoundPage = notFoundPage; - return this; - } - - /** - * Sets the bucket's lifecycle configuration as a number of delete rules. - * - * @see Lifecycle Management - */ - public Builder deleteRules(Iterable rules) { - this.deleteRules = rules != null ? ImmutableList.copyOf(rules) : null; - return this; - } - - /** - * Sets the bucket's storage class. This defines how blobs in the bucket are stored and - * determines the SLA and the cost of storage. A list of supported values is available - * here. - */ - public Builder storageClass(String storageClass) { - this.storageClass = storageClass; - return this; - } - - /** - * Sets the bucket's location. Data for blobs in the bucket resides in physical storage within - * this region. A list of supported values is available - * here. 
- */ - public Builder location(String location) { - this.location = location; - return this; - } - - Builder etag(String etag) { - this.etag = etag; - return this; - } - - Builder createTime(Long createTime) { - this.createTime = createTime; - return this; - } - - Builder metageneration(Long metageneration) { - this.metageneration = metageneration; - return this; - } - - /** - * Sets the bucket's Cross-Origin Resource Sharing (CORS) configuration. - * - * @see - * Cross-Origin Resource Sharing (CORS) - */ - public Builder cors(Iterable cors) { - this.cors = cors != null ? ImmutableList.copyOf(cors) : null; - return this; - } - - /** - * Sets the bucket's access control configuration. - * - * @see - * About Access Control Lists - */ - public Builder acl(Iterable acl) { - this.acl = acl != null ? ImmutableList.copyOf(acl) : null; - return this; - } - - /** - * Sets the default access control configuration to apply to bucket's blobs when no other - * configuration is specified. - * - * @see - * About Access Control Lists - */ - public Builder defaultAcl(Iterable acl) { - this.defaultAcl = acl != null ? ImmutableList.copyOf(acl) : null; - return this; - } - - /** - * Creates a {@code BucketInfo} object. - */ - public BucketInfo build() { - checkNotNull(name); - return new BucketInfo(this); - } - } - - private BucketInfo(Builder builder) { - id = builder.id; - name = builder.name; - etag = builder.etag; - createTime = builder.createTime; - metageneration = builder.metageneration; - location = builder.location; - storageClass = builder.storageClass; - cors = builder.cors; - acl = builder.acl; - defaultAcl = builder.defaultAcl; - owner = builder.owner; - selfLink = builder.selfLink; - versioningEnabled = builder.versioningEnabled; - indexPage = builder.indexPage; - notFoundPage = builder.notFoundPage; - deleteRules = builder.deleteRules; - } - - /** - * Returns the bucket's id. - */ - public String id() { - return id; - } - - /** - * Returns the bucket's name. - */ - public String name() { - return name; - } - - /** - * Returns the bucket's owner. This is always the project team's owner group. - */ - public Entity owner() { - return owner; - } - - /** - * Returns the URI of this bucket as a string. - */ - public String selfLink() { - return selfLink; - } - - /** - * Returns {@code true} if versioning is fully enabled for this bucket, {@code false} otherwise. - */ - public Boolean versioningEnabled() { - return Data.isNull(versioningEnabled) ? null : versioningEnabled; - } - - /** - * Returns bucket's website index page. Behaves as the bucket's directory index where missing - * blobs are treated as potential directories. - */ - public String indexPage() { - return indexPage; - } - - /** - * Returns the custom object to return when a requested resource is not found. - */ - public String notFoundPage() { - return notFoundPage; - } - - /** - * Returns bucket's lifecycle configuration as a number of delete rules. - * - * @see Lifecycle Management - */ - public List deleteRules() { - return deleteRules; - } - - /** - * Returns HTTP 1.1 Entity tag for the bucket. - * - * @see Entity Tags - */ - public String etag() { - return etag; - } - - /** - * Returns the time at which the bucket was created. - */ - public Long createTime() { - return createTime; - } - - /** - * Returns the metadata generation of this bucket. - */ - public Long metageneration() { - return metageneration; - } - - /** - * Returns the bucket's location. Data for blobs in the bucket resides in physical storage within - * this region. 
- * - * @see Bucket Locations - */ - public String location() { - return location; - } - - /** - * Returns the bucket's storage class. This defines how blobs in the bucket are stored and - * determines the SLA and the cost of storage. - * - * @see Storage Classes - */ - public String storageClass() { - return storageClass; - } - - /** - * Returns the bucket's Cross-Origin Resource Sharing (CORS) configuration. - * - * @see - * Cross-Origin Resource Sharing (CORS) - */ - public List cors() { - return cors; - } - - /** - * Returns the bucket's access control configuration. - * - * @see - * About Access Control Lists - */ - public List acl() { - return acl; - } - - /** - * Returns the default access control configuration for this bucket's blobs. - * - * @see - * About Access Control Lists - */ - public List defaultAcl() { - return defaultAcl; - } - - /** - * Returns a builder for the current bucket. - */ - public Builder toBuilder() { - return new Builder(this); - } - - @Override - public int hashCode() { - return Objects.hash(name); - } - - @Override - public boolean equals(Object obj) { - return obj instanceof BucketInfo && Objects.equals(toPb(), ((BucketInfo) obj).toPb()); - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("name", name()) - .toString(); - } - - com.google.api.services.storage.model.Bucket toPb() { - com.google.api.services.storage.model.Bucket bucketPb = - new com.google.api.services.storage.model.Bucket(); - bucketPb.setId(id); - bucketPb.setName(name); - bucketPb.setEtag(etag); - if (createTime != null) { - bucketPb.setTimeCreated(new DateTime(createTime)); - } - if (metageneration != null) { - bucketPb.setMetageneration(metageneration); - } - if (location != null) { - bucketPb.setLocation(location); - } - if (storageClass != null) { - bucketPb.setStorageClass(storageClass); - } - if (cors != null) { - bucketPb.setCors(transform(cors, Cors.TO_PB_FUNCTION)); - } - if (acl != null) { - bucketPb.setAcl(transform(acl, new Function() { - @Override - public BucketAccessControl apply(Acl acl) { - return acl.toBucketPb(); - } - })); - } - if (defaultAcl != null) { - bucketPb.setDefaultObjectAcl(transform(defaultAcl, new Function() { - @Override - public ObjectAccessControl apply(Acl acl) { - return acl.toObjectPb(); - } - })); - } - if (owner != null) { - bucketPb.setOwner(new Owner().setEntity(owner.toPb())); - } - bucketPb.setSelfLink(selfLink); - if (versioningEnabled != null) { - bucketPb.setVersioning(new Versioning().setEnabled(versioningEnabled)); - } - if (indexPage != null || notFoundPage != null) { - Website website = new Website(); - website.setMainPageSuffix(indexPage); - website.setNotFoundPage(notFoundPage); - bucketPb.setWebsite(website); - } - if (deleteRules != null) { - Lifecycle lifecycle = new Lifecycle(); - lifecycle.setRule(transform(deleteRules, new Function() { - @Override - public Rule apply(DeleteRule deleteRule) { - return deleteRule.toPb(); - } - })); - bucketPb.setLifecycle(lifecycle); - } - return bucketPb; - } - - /** - * Creates a {@code BucketInfo} object for the provided bucket name. - */ - public static BucketInfo of(String name) { - return builder(name).build(); - } - - /** - * Returns a {@code BucketInfo} builder where the bucket's name is set to the provided name. 
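The static factories deleted here took only a bucket name; in the merged class the handle also carries the Storage service, as in Bucket.of(this, bucket) used by StorageImpl later in this patch. A small before/after sketch, assuming Bucket.of(Storage, String) is the public replacement and that the service is bootstrapped with the usual StorageOptions.defaultInstance().service(); the bucket name is illustrative.

import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class BucketFactorySketch {
  static Bucket createBucket() {
    Storage storage = StorageOptions.defaultInstance().service();
    // Before this patch: storage.create(BucketInfo.of("my_unique_bucket"));
    // After this patch the bucket handle itself references the service:
    Bucket bucket = Bucket.of(storage, "my_unique_bucket");
    return storage.create(bucket);
  }
}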
- */ - public static Builder builder(String name) { - return new Builder().name(name); - } - - static BucketInfo fromPb(com.google.api.services.storage.model.Bucket bucketPb) { - Builder builder = new Builder().name(bucketPb.getName()); - if (bucketPb.getId() != null) { - builder.id(bucketPb.getId()); - } - if (bucketPb.getEtag() != null) { - builder.etag(bucketPb.getEtag()); - } - if (bucketPb.getMetageneration() != null) { - builder.metageneration(bucketPb.getMetageneration()); - } - if (bucketPb.getSelfLink() != null) { - builder.selfLink(bucketPb.getSelfLink()); - } - if (bucketPb.getTimeCreated() != null) { - builder.createTime(bucketPb.getTimeCreated().getValue()); - } - if (bucketPb.getLocation() != null) { - builder.location(bucketPb.getLocation()); - } - if (bucketPb.getStorageClass() != null) { - builder.storageClass(bucketPb.getStorageClass()); - } - if (bucketPb.getCors() != null) { - builder.cors(transform(bucketPb.getCors(), Cors.FROM_PB_FUNCTION)); - } - if (bucketPb.getAcl() != null) { - builder.acl(transform(bucketPb.getAcl(), new Function() { - @Override - public Acl apply(BucketAccessControl bucketAccessControl) { - return Acl.fromPb(bucketAccessControl); - } - })); - } - if (bucketPb.getDefaultObjectAcl() != null) { - builder.defaultAcl(transform(bucketPb.getDefaultObjectAcl(), - new Function() { - @Override - public Acl apply(ObjectAccessControl objectAccessControl) { - return Acl.fromPb(objectAccessControl); - } - })); - } - if (bucketPb.getOwner() != null) { - builder.owner(Entity.fromPb(bucketPb.getOwner().getEntity())); - } - if (bucketPb.getVersioning() != null) { - builder.versioningEnabled(bucketPb.getVersioning().getEnabled()); - } - Website website = bucketPb.getWebsite(); - if (website != null) { - builder.indexPage(website.getMainPageSuffix()); - builder.notFoundPage(website.getNotFoundPage()); - } - if (bucketPb.getLifecycle() != null && bucketPb.getLifecycle().getRule() != null) { - builder.deleteRules(transform(bucketPb.getLifecycle().getRule(), - new Function() { - @Override - public DeleteRule apply(Rule rule) { - return DeleteRule.fromPb(rule); - } - })); - } - return builder.build(); - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java index 1e5427a847d4..1aa91fe713e8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java @@ -45,9 +45,11 @@ public class CopyWriter implements Restorable { private final StorageOptions serviceOptions; private final StorageRpc storageRpc; private RewriteResponse rewriteResponse; + private transient Storage storage; CopyWriter(StorageOptions serviceOptions, RewriteResponse rewriteResponse) { this.serviceOptions = serviceOptions; + this.storage = serviceOptions.service(); this.rewriteResponse = rewriteResponse; this.storageRpc = serviceOptions.rpc(); } @@ -64,11 +66,11 @@ public class CopyWriter implements Restorable { * * @throws StorageException upon failure */ - public BlobInfo result() { + public Blob result() { while (!isDone()) { copyChunk(); } - return BlobInfo.fromPb(rewriteResponse.result); + return Blob.fromPb(storage, rewriteResponse.result); } /** @@ -119,7 +121,7 @@ public RestorableState capture() { serviceOptions, BlobId.fromPb(rewriteResponse.rewriteRequest.source), rewriteResponse.rewriteRequest.sourceOptions, - BlobInfo.fromPb(rewriteResponse.rewriteRequest.target), + 
Blob.fromPb(storage, rewriteResponse.rewriteRequest.target), rewriteResponse.rewriteRequest.targetOptions) .blobSize(blobSize()) .isDone(isDone()) @@ -136,9 +138,9 @@ static class StateImpl implements RestorableState, Serializable { private final StorageOptions serviceOptions; private final BlobId source; private final Map sourceOptions; - private final BlobInfo target; + private final Blob target; private final Map targetOptions; - private final BlobInfo result; + private final Blob result; private final long blobSize; private final boolean isDone; private final String rewriteToken; @@ -164,9 +166,9 @@ static class Builder { private final StorageOptions serviceOptions; private final BlobId source; private final Map sourceOptions; - private final BlobInfo target; + private final Blob target; private final Map targetOptions; - private BlobInfo result; + private Blob result; private long blobSize; private boolean isDone; private String rewriteToken; @@ -174,8 +176,8 @@ static class Builder { private Long megabytesCopiedPerChunk; private Builder(StorageOptions options, BlobId source, - Map sourceOptions, - BlobInfo target, Map targetOptions) { + Map sourceOptions, Blob target, + Map targetOptions) { this.serviceOptions = options; this.source = source; this.sourceOptions = sourceOptions; @@ -183,7 +185,7 @@ private Builder(StorageOptions options, BlobId source, this.targetOptions = targetOptions; } - Builder result(BlobInfo result) { + Builder result(Blob result) { this.result = result; return this; } @@ -219,7 +221,7 @@ RestorableState build() { } static Builder builder(StorageOptions options, BlobId source, - Map sourceOptions, BlobInfo target, + Map sourceOptions, Blob target, Map targetOptions) { return new Builder(options, source, sourceOptions, target, targetOptions); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index f8c90ff42930..6eec1109ce9d 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -335,23 +335,23 @@ public static BlobTargetOption metagenerationNotMatch() { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } - static Tuple convert(BlobInfo info, BlobWriteOption... options) { - BlobInfo.Builder infoBuilder = info.toBuilder().crc32c(null).md5(null); + static Tuple convert(Blob blob, BlobWriteOption... 
options) { + Blob.Builder blobBuilder = blob.toBuilder().crc32c(null).md5(null); List targetOptions = Lists.newArrayListWithCapacity(options.length); for (BlobWriteOption option : options) { switch (option.option) { case IF_CRC32C_MATCH: - infoBuilder.crc32c(info.crc32c()); + blobBuilder.crc32c(blob.crc32c()); break; case IF_MD5_MATCH: - infoBuilder.md5(info.md5()); + blobBuilder.md5(blob.md5()); break; default: targetOptions.add(option.toTargetOption()); break; } } - return Tuple.of(infoBuilder.build(), + return Tuple.of(blobBuilder.build(), targetOptions.toArray(new BlobTargetOption[targetOptions.size()])); } } @@ -786,7 +786,7 @@ class ComposeRequest implements Serializable { private static final long serialVersionUID = -7385681353748590911L; private final List sourceBlobs; - private final BlobInfo target; + private final Blob target; private final List targetOptions; /** @@ -821,7 +821,7 @@ public static class Builder { private final List sourceBlobs = new LinkedList<>(); private final Set targetOptions = new LinkedHashSet<>(); - private BlobInfo target; + private Blob target; /** * Add source blobs for compose operation. @@ -851,7 +851,7 @@ public Builder addSource(String blob, long generation) { /** * Sets compose operation's target blob. */ - public Builder target(BlobInfo target) { + public Builder target(Blob target) { this.target = target; return this; } @@ -898,7 +898,7 @@ public List sourceBlobs() { /** * Returns compose operation's target blob. */ - public BlobInfo target() { + public Blob target() { return target; } @@ -915,7 +915,7 @@ public List targetOptions() { * @param sources source blobs names * @param target target blob */ - public static ComposeRequest of(Iterable sources, BlobInfo target) { + public static ComposeRequest of(Iterable sources, Blob target) { return builder().target(target).addSource(sources).build(); } @@ -926,8 +926,9 @@ public static ComposeRequest of(Iterable sources, BlobInfo target) { * @param sources source blobs names * @param target target blob name */ - public static ComposeRequest of(String bucket, Iterable sources, String target) { - return of(sources, BlobInfo.builder(BlobId.of(bucket, target)).build()); + public static ComposeRequest of( + Storage storage, String bucket, Iterable sources, String target) { + return of(sources, Blob.builder(storage, BlobId.of(bucket, target)).build()); } /** @@ -947,7 +948,7 @@ class CopyRequest implements Serializable { private final BlobId source; private final List sourceOptions; - private final BlobInfo target; + private final Blob target; private final List targetOptions; private final Long megabytesCopiedPerChunk; @@ -956,7 +957,7 @@ public static class Builder { private final Set sourceOptions = new LinkedHashSet<>(); private final Set targetOptions = new LinkedHashSet<>(); private BlobId source; - private BlobInfo target; + private Blob target; private Long megabytesCopiedPerChunk; /** @@ -1004,8 +1005,8 @@ public Builder sourceOptions(Iterable options) { * * @return the builder */ - public Builder target(BlobId target) { - this.target = BlobInfo.builder(target).build(); + public Builder target(Storage storage, BlobId target) { + this.target = Blob.builder(storage, target).build(); return this; } @@ -1017,7 +1018,7 @@ public Builder target(BlobId target) { * @return the builder * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public Builder target(BlobInfo target, BlobTargetOption... options) + public Builder target(Blob target, BlobTargetOption... 
options) throws IllegalArgumentException { checkContentType(target); this.target = target; @@ -1033,7 +1034,7 @@ public Builder target(BlobInfo target, BlobTargetOption... options) * @return the builder * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public Builder target(BlobInfo target, Iterable options) + public Builder target(Blob target, Iterable options) throws IllegalArgumentException { checkContentType(target); this.target = target; @@ -1086,9 +1087,9 @@ public List sourceOptions() { } /** - * Returns the {@link BlobInfo} for the target blob. + * Returns the {@link Blob} for the target blob. */ - public BlobInfo target() { + public Blob target() { return target; } @@ -1115,11 +1116,11 @@ public Long megabytesCopiedPerChunk() { * * @param sourceBucket name of the bucket containing the source blob * @param sourceBlob name of the source blob - * @param target a {@code BlobInfo} object for the target blob + * @param target a {@code Blob} object for the target blob * @return a copy request * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo target) + public static CopyRequest of(String sourceBucket, String sourceBlob, Blob target) throws IllegalArgumentException { checkContentType(target); return builder().source(sourceBucket, sourceBlob).target(target).build(); @@ -1131,28 +1132,29 @@ public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo ta * field. * * @param sourceBlobId a {@code BlobId} object for the source blob - * @param target a {@code BlobInfo} object for the target blob + * @param target a {@code Blob} object for the target blob * @return a copy request * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) + public static CopyRequest of(BlobId sourceBlobId, Blob target) throws IllegalArgumentException { checkContentType(target); return builder().source(sourceBlobId).target(target).build(); } /** - * Creates a copy request. Target blob information is copied from source. + * Creates a copy request. Target blob is copied from source. 
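The CopyRequest factories now take the Storage handle so they can build their target Blob. A minimal sketch of the single-bucket copy described above, using the of(Storage, String, String, String) overload added in this hunk together with the copy/result flow from the Storage javadoc; bucket and blob names are illustrative.

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.CopyWriter;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.Storage.CopyRequest;

public class CopySketch {
  static Blob copyWithinBucket(Storage storage) {
    CopyRequest request =
        CopyRequest.of(storage, "my_bucket", "source_blob", "target_blob");
    CopyWriter copyWriter = storage.copy(request);
    return copyWriter.result();  // completes any remaining chunk copies, then returns the new Blob
  }
}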
* * @param sourceBucket name of the bucket containing both the source and the target blob * @param sourceBlob name of the source blob * @param targetBlob name of the target blob * @return a copy request */ - public static CopyRequest of(String sourceBucket, String sourceBlob, String targetBlob) { + public static CopyRequest of( + Storage storage, String sourceBucket, String sourceBlob, String targetBlob) { return CopyRequest.builder() .source(sourceBucket, sourceBlob) - .target(BlobId.of(sourceBucket, targetBlob)) + .target(storage, BlobId.of(sourceBucket, targetBlob)) .build(); } @@ -1164,8 +1166,9 @@ public static CopyRequest of(String sourceBucket, String sourceBlob, String targ * @param target a {@code BlobId} object for the target blob * @return a copy request */ - public static CopyRequest of(String sourceBucket, String sourceBlob, BlobId target) { - return builder().source(sourceBucket, sourceBlob).target(target).build(); + public static CopyRequest of( + Storage storage, String sourceBucket, String sourceBlob, BlobId target) { + return builder().source(sourceBucket, sourceBlob).target(storage, target).build(); } /** @@ -1175,10 +1178,10 @@ public static CopyRequest of(String sourceBucket, String sourceBlob, BlobId targ * @param targetBlob name of the target blob, in the same bucket of the source blob * @return a copy request */ - public static CopyRequest of(BlobId sourceBlobId, String targetBlob) { + public static CopyRequest of(Storage storage, BlobId sourceBlobId, String targetBlob) { return CopyRequest.builder() .source(sourceBlobId) - .target(BlobId.of(sourceBlobId.bucket(), targetBlob)) + .target(storage, BlobId.of(sourceBlobId.bucket(), targetBlob)) .build(); } @@ -1189,101 +1192,98 @@ public static CopyRequest of(BlobId sourceBlobId, String targetBlob) { * @param targetBlobId a {@code BlobId} object for the target blob * @return a copy request */ - public static CopyRequest of(BlobId sourceBlobId, BlobId targetBlobId) { - return CopyRequest.builder() - .source(sourceBlobId) - .target(targetBlobId) - .build(); + public static CopyRequest of(Storage storage, BlobId sourceBlobId, BlobId targetBlobId) { + return CopyRequest.builder().source(sourceBlobId).target(storage, targetBlobId).build(); } public static Builder builder() { return new Builder(); } - private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentException { - checkArgument(blobInfo.contentType() != null, "Blob content type can not be null"); + private static void checkContentType(Blob blob) throws IllegalArgumentException { + checkArgument(blob.contentType() != null, "Blob content type can not be null"); } } /** * Create a new bucket. * - * @return a complete bucket information + * @return a complete bucket * @throws StorageException upon failure */ - BucketInfo create(BucketInfo bucketInfo, BucketTargetOption... options); + Bucket create(Bucket bucket, BucketTargetOption... options); /** * Create a new blob with no content. * - * @return a complete blob information + * @return a complete blob * @throws StorageException upon failure */ - BlobInfo create(BlobInfo blobInfo, BlobTargetOption... options); + Blob create(Blob blob, BlobTargetOption... options); /** * Create a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. MD5 and CRC32C hashes of * {@code content} are computed and used for validating transferred data. 
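A compilable sketch of the direct-upload create described above, using the Blob.builder(storage, BlobId) factory that Bucket.create relies on earlier in this patch; bucket name, blob name, and content are illustrative.

import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;

public class CreateBlobSketch {
  static Blob createTextBlob(Storage storage) {
    Blob blob = Blob.builder(storage, BlobId.of("my_bucket", "greeting.txt"))
        .contentType("text/plain")
        .build();
    // MD5 and CRC32C hashes of the content are computed and used to validate the transfer.
    return storage.create(blob, "Hello, Cloud Storage!".getBytes(UTF_8));
  }
}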
* - * @return a complete blob information + * @return a complete blob * @throws StorageException upon failure * @see Hashes and ETags */ - BlobInfo create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options); + Blob create(Blob blob, byte[] content, BlobTargetOption... options); /** * Create a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. By default any md5 and crc32c - * values in the given {@code blobInfo} are ignored unless requested via the + * values in the given {@code blob} are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. * - * @return a complete blob information + * @return a complete blob * @throws StorageException upon failure */ - BlobInfo create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options); + Blob create(Blob blob, InputStream content, BlobWriteOption... options); /** * Return the requested bucket or {@code null} if not found. * * @throws StorageException upon failure */ - BucketInfo get(String bucket, BucketGetOption... options); + Bucket get(String bucket, BucketGetOption... options); /** * Return the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(String bucket, String blob, BlobGetOption... options); + Blob get(String bucket, String blob, BlobGetOption... options); /** * Return the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(BlobId blob, BlobGetOption... options); + Blob get(BlobId blob, BlobGetOption... options); /** * Return the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(BlobId blob); + Blob get(BlobId blob); /** * List the project's buckets. * * @throws StorageException upon failure */ - Page list(BucketListOption... options); + Page list(BucketListOption... options); /** * List the bucket's blobs. * * @throws StorageException upon failure */ - Page list(String bucket, BlobListOption... options); + Page list(String bucket, BlobListOption... options); /** * Update bucket information. @@ -1291,37 +1291,41 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * @return the updated bucket * @throws StorageException upon failure */ - BucketInfo update(BucketInfo bucketInfo, BucketTargetOption... options); + Bucket update(Bucket bucket, BucketTargetOption... options); /** * Update blob information. Original metadata are merged with metadata in the provided - * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata - * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. + * {@code blob}. To replace metadata instead you first have to unset them. Unsetting metadata + * can be done by setting the provided {@code blob}'s metadata to {@code null}. * *

Example usage of replacing blob's metadata: - *

    {@code service.update(BlobInfo.builder("bucket", "name").metadata(null).build());}
-   *    {@code service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());}
+   * 
    {@code service.update(
+   *        Blob.builder(storage, "bucket", "name").metadata(null).build());}
+   *    {@code service.update(
+   *        Blob.builder(storage, "bucket", "name").metadata(newMetadata).build());}
    * 
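The inline example above, expanded into a compilable sketch of the unset-then-set pattern: the first update clears the existing metadata (per the javadoc, a null metadata map unsets the current entries), the second sets the replacement map. It uses the Blob.builder(storage, "bucket", "name") form quoted in the example; names and metadata values are illustrative.

import com.google.common.collect.ImmutableMap;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;

import java.util.Map;

public class ReplaceMetadataSketch {
  static Blob replaceMetadata(Storage storage) {
    Map<String, String> newMetadata = ImmutableMap.of("owner", "team-a");
    // Step 1: unset the current metadata.
    storage.update(Blob.builder(storage, "my_bucket", "my_blob").metadata(null).build());
    // Step 2: set the replacement metadata.
    return storage.update(
        Blob.builder(storage, "my_bucket", "my_blob").metadata(newMetadata).build());
  }
}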
* * @return the updated blob * @throws StorageException upon failure */ - BlobInfo update(BlobInfo blobInfo, BlobTargetOption... options); + Blob update(Blob blob, BlobTargetOption... options); /** * Update blob information. Original metadata are merged with metadata in the provided - * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata - * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. + * {@code blob}. To replace metadata instead you first have to unset them. Unsetting metadata + * can be done by setting the provided {@code blob}'s metadata to {@code null}. * *

Example usage of replacing blob's metadata: - *

    {@code service.update(BlobInfo.builder("bucket", "name").metadata(null).build());}
-   *    {@code service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());}
+   * 
    {@code service.update(
+   *        Blob.builder(storage, "bucket", "name").metadata(null).build());}
+   *    {@code service.update(
+   *        Blob.builder(storage, "bucket", "name").metadata(newMetadata).build());}
    * 
* * @return the updated blob * @throws StorageException upon failure */ - BlobInfo update(BlobInfo blobInfo); + Blob update(Blob blob); /** * Delete the requested bucket. @@ -1361,7 +1365,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * @return the composed blob * @throws StorageException upon failure */ - BlobInfo compose(ComposeRequest composeRequest); + Blob compose(ComposeRequest composeRequest); /** * Sends a copy request. Returns a {@link CopyWriter} object for the provided @@ -1372,14 +1376,14 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * might issue multiple RPC calls depending on blob's size. * *
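For the compose operation declared further below in this interface, ComposeRequest.of also gains a Storage parameter in this patch (see the ComposeRequest hunk above). A minimal sketch of concatenating two blobs into one; bucket and blob names are illustrative.

import com.google.common.collect.ImmutableList;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.Storage.ComposeRequest;

public class ComposeSketch {
  static Blob concatenate(Storage storage) {
    ComposeRequest request = ComposeRequest.of(
        storage, "my_bucket", ImmutableList.of("part-1", "part-2"), "combined");
    return storage.compose(request);  // returns the composed target Blob
  }
}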

Example usage of copy: - *

    {@code BlobInfo blob = service.copy(copyRequest).result();}
+   * 
    {@code Blob blob = service.copy(copyRequest).result();}
    * 
* To explicitly issue chunk copy requests use {@link CopyWriter#copyChunk()} instead: *
    {@code CopyWriter copyWriter = service.copy(copyRequest);
    *    while (!copyWriter.isDone()) {
    *        copyWriter.copyChunk();
    *    }
-   *    BlobInfo blob = copyWriter.result();
+   *    Blob blob = copyWriter.result();
    * }
    * 
* @@ -1416,7 +1420,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Return a channel for reading the blob's content. The blob's latest generation is read. If the - * blob changes while reading (i.e. {@link BlobInfo#etag()} changes), subsequent calls to + * blob changes while reading (i.e. {@link Blob#etag()} changes), subsequent calls to * {@code blobReadChannel.read(ByteBuffer)} may throw {@link StorageException}. * *
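Because CopyWriter is restorable (its capture() now serializes a Blob target, per the CopyWriter hunk above), a long-running copy can be checkpointed and resumed. A sketch assuming the usual RestorableState.restore() contract from the core package; the request is any CopyRequest built as shown earlier.

import com.google.gcloud.RestorableState;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.CopyWriter;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.Storage.CopyRequest;

public class ResumableCopySketch {
  static Blob copyWithCheckpoint(Storage storage, CopyRequest request) {
    CopyWriter copyWriter = storage.copy(request);
    copyWriter.copyChunk();                                         // copy the first chunk
    RestorableState<CopyWriter> checkpoint = copyWriter.capture();  // serializable checkpoint
    // ...persist the checkpoint and pick the copy up later, possibly elsewhere...
    CopyWriter resumed = checkpoint.restore();
    while (!resumed.isDone()) {
      resumed.copyChunk();
    }
    return resumed.result();
  }
}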

The {@link BlobSourceOption#generationMatch(long)} option can be provided to ensure that @@ -1431,7 +1435,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * Return a channel for reading the blob's content. If {@code blob.generation()} is set * data corresponding to that generation is read. If {@code blob.generation()} is {@code null} * the blob's latest generation is read. If the blob changes while reading (i.e. - * {@link BlobInfo#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)} + * {@link Blob#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)} * may throw {@link StorageException}. * *

The {@link BlobSourceOption#generationMatch()} and @@ -1445,12 +1449,12 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Create a blob and return a channel for writing its content. By default any md5 and crc32c - * values in the given {@code blobInfo} are ignored unless requested via the + * values in the given {@code blob} are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. * * @throws StorageException upon failure */ - WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options); + WriteChannel writer(Blob blob, BlobWriteOption... options); /** * Generates a signed URL for a blob. @@ -1462,41 +1466,41 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * *

Example usage of creating a signed URL that is valid for 2 weeks: *

   {@code
-   *     service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS);
+   *     service.signUrl(Blob.builder(service, "bucket", "name").build(), 14, TimeUnit.DAYS);
    * }
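The URL returned by signUrl can be handed to a client that has no gcloud credentials. A sketch that signs for 14 days and reads the blob back over plain HTTPS, using the same Blob.builder(storage, "bucket", "name") form as the example above; Guava's ByteStreams is used only for brevity, and the names are illustrative.

import com.google.common.io.ByteStreams;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.concurrent.TimeUnit;

public class SignedUrlSketch {
  static byte[] readViaSignedUrl(Storage storage) throws IOException {
    URL signedUrl = storage.signUrl(
        Blob.builder(storage, "my_bucket", "my_blob").build(), 14, TimeUnit.DAYS);
    // From here on no credentials are needed; the signature is embedded in the URL.
    try (InputStream in = signedUrl.openStream()) {
      return ByteStreams.toByteArray(in);
    }
  }
}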
* - * @param blobInfo the blob associated with the signed URL + * @param blob the blob associated with the signed URL * @param duration time until the signed URL expires, expressed in {@code unit}. The finer * granularity supported is 1 second, finer granularities will be truncated * @param unit time unit of the {@code duration} parameter * @param options optional URL signing options * @see Signed-URLs */ - URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options); + URL signUrl(Blob blob, long duration, TimeUnit unit, SignUrlOption... options); /** * Gets the requested blobs. A batch request is used to perform this call. * * @param blobIds blobs to get - * @return an immutable list of {@code BlobInfo} objects. If a blob does not exist or access to it + * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it * has been denied the corresponding item in the list is {@code null}. * @throws StorageException upon failure */ - List get(BlobId... blobIds); + List get(BlobId... blobIds); /** * Updates the requested blobs. A batch request is used to perform this call. Original metadata - * are merged with metadata in the provided {@code BlobInfo} objects. To replace metadata instead + * are merged with metadata in the provided {@code Blob} objects. To replace metadata instead * you first have to unset them. Unsetting metadata can be done by setting the provided - * {@code BlobInfo} objects metadata to {@code null}. See - * {@link #update(com.google.gcloud.storage.BlobInfo)} for a code example. + * {@code Blob} objects metadata to {@code null}. See + * {@link #update(com.google.gcloud.storage.Blob)} for a code example. * - * @param blobInfos blobs to update - * @return an immutable list of {@code BlobInfo} objects. If a blob does not exist or access to it + * @param blobs blobs to update + * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it * has been denied the corresponding item in the list is {@code null}. * @throws StorageException upon failure */ - List update(BlobInfo... blobInfos); + List update(Blob... blobs); /** * Deletes the requested blobs. A batch request is used to perform this call. diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index a6c851d0f638..71f8b92eab31 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -33,7 +33,6 @@ import com.google.api.services.storage.model.StorageObject; import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.common.base.Function; -import com.google.common.base.Functions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; @@ -108,11 +107,13 @@ public RetryResult beforeEval(Exception exception) { } @Override - public BucketInfo create(BucketInfo bucketInfo, BucketTargetOption... options) { - final com.google.api.services.storage.model.Bucket bucketPb = bucketInfo.toPb(); - final Map optionsMap = optionMap(bucketInfo, options); + public Bucket create(Bucket bucket, BucketTargetOption... 
options) { + final com.google.api.services.storage.model.Bucket bucketPb = bucket.toPb(); + final Map optionsMap = optionMap(bucket, options); try { - return BucketInfo.fromPb(runWithRetries( + return Bucket.fromPb( + this, + runWithRetries( new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { @@ -125,36 +126,40 @@ public com.google.api.services.storage.model.Bucket call() { } @Override - public BlobInfo create(BlobInfo blobInfo, BlobTargetOption... options) { - BlobInfo updatedInfo = blobInfo.toBuilder() + public Blob create(Blob blob, BlobTargetOption... options) { + Blob updated = + blob.toBuilder() .md5(EMPTY_BYTE_ARRAY_MD5) .crc32c(EMPTY_BYTE_ARRAY_CRC32C) .build(); - return create(updatedInfo, new ByteArrayInputStream(EMPTY_BYTE_ARRAY), options); + return create(updated, new ByteArrayInputStream(EMPTY_BYTE_ARRAY), options); } @Override - public BlobInfo create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { + public Blob create(Blob blob, byte[] content, BlobTargetOption... options) { content = firstNonNull(content, EMPTY_BYTE_ARRAY); - BlobInfo updatedInfo = blobInfo.toBuilder() + Blob updated = + blob.toBuilder() .md5(BaseEncoding.base64().encode(Hashing.md5().hashBytes(content).asBytes())) .crc32c(BaseEncoding.base64().encode( Ints.toByteArray(Hashing.crc32c().hashBytes(content).asInt()))) .build(); - return create(updatedInfo, new ByteArrayInputStream(content), options); + return create(updated, new ByteArrayInputStream(content), options); } @Override - public BlobInfo create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options) { - Tuple targetOptions = BlobTargetOption.convert(blobInfo, options); + public Blob create(Blob blob, InputStream content, BlobWriteOption... options) { + Tuple targetOptions = BlobTargetOption.convert(blob, options); return create(targetOptions.x(), content, targetOptions.y()); } - private BlobInfo create(BlobInfo info, final InputStream content, BlobTargetOption... options) { - final StorageObject blobPb = info.toPb(); - final Map optionsMap = optionMap(info, options); + private Blob create(Blob blob, final InputStream content, BlobTargetOption... options) { + final StorageObject blobPb = blob.toPb(); + final Map optionsMap = optionMap(blob, options); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + return Blob.fromPb( + this, + runWithRetries(new Callable() { @Override public StorageObject call() { return storageRpc.create(blobPb, @@ -167,8 +172,8 @@ public StorageObject call() { } @Override - public BucketInfo get(String bucket, BucketGetOption... options) { - final com.google.api.services.storage.model.Bucket bucketPb = BucketInfo.of(bucket).toPb(); + public Bucket get(String bucket, BucketGetOption... options) { + final com.google.api.services.storage.model.Bucket bucketPb = Bucket.of(this, bucket).toPb(); final Map optionsMap = optionMap(options); try { com.google.api.services.storage.model.Bucket answer = runWithRetries( @@ -178,19 +183,19 @@ public com.google.api.services.storage.model.Bucket call() { return storageRpc.get(bucketPb, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); - return answer == null ? null : BucketInfo.fromPb(answer); + return answer == null ? null : Bucket.fromPb(this, answer); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } } @Override - public BlobInfo get(String bucket, String blob, BlobGetOption... options) { + public Blob get(String bucket, String blob, BlobGetOption... 
options) { return get(BlobId.of(bucket, blob), options); } @Override - public BlobInfo get(BlobId blob, BlobGetOption... options) { + public Blob get(BlobId blob, BlobGetOption... options) { final StorageObject storedObject = blob.toPb(); final Map optionsMap = optionMap(blob, options); try { @@ -200,18 +205,18 @@ public StorageObject call() { return storageRpc.get(storedObject, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); - return storageObject == null ? null : BlobInfo.fromPb(storageObject); + return storageObject == null ? null : Blob.fromPb(this, storageObject); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } } @Override - public BlobInfo get(BlobId blob) { + public Blob get(BlobId blob) { return get(blob, new BlobGetOption[0]); } - private static class BucketPageFetcher implements NextPageFetcher { + private static class BucketPageFetcher implements NextPageFetcher { private static final long serialVersionUID = 5850406828803613729L; private final Map requestOptions; @@ -226,12 +231,12 @@ private static class BucketPageFetcher implements NextPageFetcher { } @Override - public Page nextPage() { + public Page nextPage() { return listBuckets(serviceOptions, requestOptions); } } - private static class BlobPageFetcher implements NextPageFetcher { + private static class BlobPageFetcher implements NextPageFetcher { private static final long serialVersionUID = 81807334445874098L; private final Map requestOptions; @@ -247,22 +252,23 @@ private static class BlobPageFetcher implements NextPageFetcher { } @Override - public Page nextPage() { + public Page nextPage() { return listBlobs(bucket, serviceOptions, requestOptions); } } @Override - public Page list(BucketListOption... options) { + public Page list(BucketListOption... options) { return listBuckets(options(), optionMap(options)); } @Override - public Page list(final String bucket, BlobListOption... options) { + public Page list(final String bucket, BlobListOption... options) { return listBlobs(bucket, options(), optionMap(options)); } - private static Page listBuckets(final StorageOptions serviceOptions, + private static Page listBuckets( + final StorageOptions serviceOptions, final Map optionsMap) { try { Tuple> result = runWithRetries( @@ -273,12 +279,14 @@ public Tuple> cal } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable buckets = - result.y() == null ? ImmutableList.of() : Iterables.transform(result.y(), - new Function() { + final Storage storage = serviceOptions.service(); + Iterable buckets = + result.y() == null + ? ImmutableList.of() : Iterables.transform( + result.y(), new Function() { @Override - public BucketInfo apply(com.google.api.services.storage.model.Bucket bucketPb) { - return BucketInfo.fromPb(bucketPb); + public Bucket apply(com.google.api.services.storage.model.Bucket bucketPb) { + return Bucket.fromPb(storage, bucketPb); } }); return new PageImpl<>(new BucketPageFetcher(serviceOptions, cursor, optionsMap), cursor, @@ -288,7 +296,7 @@ public BucketInfo apply(com.google.api.services.storage.model.Bucket bucketPb) { } } - private static Page listBlobs(final String bucket, + private static Page listBlobs(final String bucket, final StorageOptions serviceOptions, final Map optionsMap) { try { Tuple> result = runWithRetries( @@ -299,12 +307,14 @@ public Tuple> call() { } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable blobs = - result.y() == null ? 
ImmutableList.of() : Iterables.transform(result.y(), - new Function() { + final Storage storage = serviceOptions.service(); + Iterable blobs = + result.y() == null + ? ImmutableList.of() + : Iterables.transform(result.y(), new Function() { @Override - public BlobInfo apply(StorageObject storageObject) { - return BlobInfo.fromPb(storageObject); + public Blob apply(StorageObject storageObject) { + return Blob.fromPb(storage, storageObject); } }); return new PageImpl<>(new BlobPageFetcher(bucket, serviceOptions, cursor, optionsMap), @@ -316,11 +326,13 @@ public BlobInfo apply(StorageObject storageObject) { } @Override - public BucketInfo update(BucketInfo bucketInfo, BucketTargetOption... options) { - final com.google.api.services.storage.model.Bucket bucketPb = bucketInfo.toPb(); - final Map optionsMap = optionMap(bucketInfo, options); + public Bucket update(Bucket bucket, BucketTargetOption... options) { + final com.google.api.services.storage.model.Bucket bucketPb = bucket.toPb(); + final Map optionsMap = optionMap(bucket, options); try { - return BucketInfo.fromPb(runWithRetries( + return Bucket.fromPb( + this, + runWithRetries( new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { @@ -333,11 +345,13 @@ public com.google.api.services.storage.model.Bucket call() { } @Override - public BlobInfo update(BlobInfo blobInfo, BlobTargetOption... options) { - final StorageObject storageObject = blobInfo.toPb(); - final Map optionsMap = optionMap(blobInfo, options); + public Blob update(Blob blob, BlobTargetOption... options) { + final StorageObject storageObject = blob.toPb(); + final Map optionsMap = optionMap(blob, options); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + return Blob.fromPb( + this, + runWithRetries(new Callable() { @Override public StorageObject call() { return storageRpc.patch(storageObject, optionsMap); @@ -349,13 +363,13 @@ public StorageObject call() { } @Override - public BlobInfo update(BlobInfo blobInfo) { - return update(blobInfo, new BlobTargetOption[0]); + public Blob update(Blob blob) { + return update(blob, new BlobTargetOption[0]); } @Override public boolean delete(String bucket, BucketSourceOption... 
options) { - final com.google.api.services.storage.model.Bucket bucketPb = BucketInfo.of(bucket).toPb(); + final com.google.api.services.storage.model.Bucket bucketPb = Bucket.of(this, bucket).toPb(); final Map optionsMap = optionMap(options); try { return runWithRetries(new Callable() { @@ -396,19 +410,22 @@ public boolean delete(BlobId blob) { } @Override - public BlobInfo compose(final ComposeRequest composeRequest) { + public Blob compose(final ComposeRequest composeRequest) { final List sources = Lists.newArrayListWithCapacity(composeRequest.sourceBlobs().size()); for (ComposeRequest.SourceBlob sourceBlob : composeRequest.sourceBlobs()) { - sources.add(BlobInfo.builder( - BlobId.of(composeRequest.target().bucket(), sourceBlob.name(), sourceBlob.generation())) - .build().toPb()); + sources.add(Blob.builder(this, BlobId.of(composeRequest.target().bucket(), sourceBlob.name(), + sourceBlob.generation())) + .build() + .toPb()); } final StorageObject target = composeRequest.target().toPb(); final Map targetOptions = optionMap(composeRequest.target().generation(), composeRequest.target().metageneration(), composeRequest.targetOptions()); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + return Blob.fromPb( + this, + runWithRetries(new Callable() { @Override public StorageObject call() { return storageRpc.compose(sources, target, targetOptions); @@ -474,12 +491,12 @@ public BatchResponse apply(BatchRequest batchRequest) { } List>> toUpdate = Lists.newArrayListWithCapacity(batchRequest.toUpdate().size()); - for (Map.Entry> entry : + for (Map.Entry> entry : batchRequest.toUpdate().entrySet()) { - BlobInfo blobInfo = entry.getKey(); + Blob blob = entry.getKey(); Map optionsMap = - optionMap(blobInfo.generation(), blobInfo.metageneration(), entry.getValue()); - toUpdate.add(Tuple.>of(blobInfo.toPb(), optionsMap)); + optionMap(blob.generation(), blob.metageneration(), entry.getValue()); + toUpdate.add(Tuple.>of(blob.toPb(), optionsMap)); } List>> toGet = Lists.newArrayListWithCapacity(batchRequest.toGet().size()); @@ -490,18 +507,27 @@ public BatchResponse apply(BatchRequest batchRequest) { } StorageRpc.BatchResponse response = storageRpc.batch(new StorageRpc.BatchRequest(toDelete, toUpdate, toGet)); - List> deletes = transformBatchResult( - toDelete, response.deletes, Functions.identity()); - List> updates = transformBatchResult( - toUpdate, response.updates, BlobInfo.FROM_PB_FUNCTION); - List> gets = transformBatchResult( - toGet, response.gets, BlobInfo.FROM_PB_FUNCTION); + List> deletes = + transformBatchResult(toDelete, response.deletes, DELETE_FUNCTION); + List> updates = + transformBatchResult(toUpdate, response.updates, Blob.FROM_PB_FUNCTION); + List> gets = + transformBatchResult(toGet, response.gets, Blob.FROM_PB_FUNCTION); return new BatchResponse(deletes, updates, gets); } + private static final Function, Boolean> DELETE_FUNCTION = + new Function, Boolean>() { + @Override + public Boolean apply(Tuple tuple) { + return tuple.y(); + } + }; + private List> transformBatchResult( Iterable>> request, - Map> results, Function transform) { + Map> results, + Function, O> transform) { List> response = Lists.newArrayListWithCapacity(results.size()); for (Tuple tuple : request) { Tuple result = results.get(tuple.x()); @@ -510,8 +536,10 @@ private List> transformBatch if (exception != null) { response.add(new BatchResponse.Result(exception)); } else { - response.add(object != null - ? 
BatchResponse.Result.of(transform.apply(object)) : BatchResponse.Result.empty()); + response.add( + object != null + ? BatchResponse.Result.of(transform.apply(Tuple.of((Storage) this, object))) + : BatchResponse.Result.empty()); } } return response; @@ -530,18 +558,19 @@ public ReadChannel reader(BlobId blob, BlobSourceOption... options) { } @Override - public BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options) { - Tuple targetOptions = BlobTargetOption.convert(blobInfo, options); + public BlobWriteChannel writer(Blob blob, BlobWriteOption... options) { + Tuple targetOptions = BlobTargetOption.convert(blob, options); return writer(targetOptions.x(), targetOptions.y()); } - private BlobWriteChannel writer(BlobInfo blobInfo, BlobTargetOption... options) { - final Map optionsMap = optionMap(blobInfo, options); - return new BlobWriteChannel(options(), blobInfo, optionsMap); + + private BlobWriteChannel writer(Blob blob, BlobTargetOption... options) { + final Map optionsMap = optionMap(blob, options); + return new BlobWriteChannel(blob, optionsMap); } @Override - public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { + public URL signUrl(Blob blob, long duration, TimeUnit unit, SignUrlOption... options) { EnumMap optionMap = Maps.newEnumMap(SignUrlOption.Option.class); for (SignUrlOption option : options) { optionMap.put(option.option(), option.value()); @@ -565,30 +594,30 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio } stBuilder.append('\n'); if (firstNonNull((Boolean) optionMap.get(SignUrlOption.Option.MD5), false)) { - checkArgument(blobInfo.md5() != null, "Blob is missing a value for md5"); - stBuilder.append(blobInfo.md5()); + checkArgument(blob.md5() != null, "Blob is missing a value for md5"); + stBuilder.append(blob.md5()); } stBuilder.append('\n'); if (firstNonNull((Boolean) optionMap.get(SignUrlOption.Option.CONTENT_TYPE), false)) { - checkArgument(blobInfo.contentType() != null, "Blob is missing a value for content-type"); - stBuilder.append(blobInfo.contentType()); + checkArgument(blob.contentType() != null, "Blob is missing a value for content-type"); + stBuilder.append(blob.contentType()); } stBuilder.append('\n'); long expiration = TimeUnit.SECONDS.convert( options().clock().millis() + unit.toMillis(duration), TimeUnit.MILLISECONDS); stBuilder.append(expiration).append('\n'); StringBuilder path = new StringBuilder(); - if (!blobInfo.bucket().startsWith("/")) { + if (!blob.bucket().startsWith("/")) { path.append('/'); } - path.append(blobInfo.bucket()); - if (!blobInfo.bucket().endsWith("/")) { + path.append(blob.bucket()); + if (!blob.bucket().endsWith("/")) { path.append('/'); } - if (blobInfo.name().startsWith("/")) { + if (blob.name().startsWith("/")) { path.setLength(stBuilder.length() - 1); } - path.append(blobInfo.name()); + path.append(blob.name()); stBuilder.append(path); try { Signature signer = Signature.getInstance("SHA256withRSA"); @@ -609,7 +638,7 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio } @Override - public List get(BlobId... blobIds) { + public List get(BlobId... blobIds) { BatchRequest.Builder requestBuilder = BatchRequest.builder(); for (BlobId blob : blobIds) { requestBuilder.get(blob); @@ -619,10 +648,10 @@ public List get(BlobId... blobIds) { } @Override - public List update(BlobInfo... blobInfos) { + public List update(Blob... 
blobs) { BatchRequest.Builder requestBuilder = BatchRequest.builder(); - for (BlobInfo blobInfo : blobInfos) { - requestBuilder.update(blobInfo); + for (Blob blob : blobs) { + requestBuilder.update(blob); } BatchResponse response = apply(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.updates(), null)); @@ -705,12 +734,12 @@ private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.O return optionMap(generation, metaGeneration, Arrays.asList(options)); } - private Map optionMap(BucketInfo bucketInfo, Option... options) { - return optionMap(null, bucketInfo.metageneration(), options); + private Map optionMap(Bucket bucket, Option... options) { + return optionMap(null, bucket.metageneration(), options); } - private Map optionMap(BlobInfo blobInfo, Option... options) { - return optionMap(blobInfo.generation(), blobInfo.metageneration(), options); + private Map optionMap(Blob blob, Option... options) { + return optionMap(blob.generation(), blob.metageneration(), options); } private Map optionMap(BlobId blobId, Option... options) { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java index fda14ea2e808..97e05df906ca 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java @@ -23,8 +23,8 @@ * BlobId blobId = BlobId.of("bucket", "blob_name"); * Blob blob = Blob.get(storage, blobId); * if (blob == null) { - * BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); - * storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); + * blob = Blob.builder(storage, blobId).contentType("text/plain").build(); + * blob.create("Hello, Cloud Storage!".getBytes(UTF_8)); * } else { * System.out.println("Updating content for " + blobId.name()); * byte[] prevContent = blob.content(); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java index 024aa04eba1b..c940ca2f71a5 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java @@ -18,7 +18,7 @@ import com.google.gcloud.AuthCredentials; import com.google.gcloud.RetryParams; -import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.StorageException; import com.google.gcloud.storage.StorageOptions; @@ -173,8 +173,8 @@ public DeleteBucketTask(Storage storage, String bucket) { @Override public Boolean call() { while (true) { - for (BlobInfo info : storage.list(bucket).values()) { - storage.delete(bucket, info.name()); + for (Blob blob : storage.list(bucket).values()) { + storage.delete(bucket, blob.name()); } try { storage.delete(bucket); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java index 8afdd8a9660d..8eba1dbdbfbc 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java @@ -24,7 +24,7 @@ * RemoteGcsHelper 
gcsHelper = RemoteGcsHelper.create(); * Storage storage = gcsHelper.options().service(); * String bucket = RemoteGcsHelper.generateBucketName(); - * storage.create(BucketInfo.of(bucket)); + * Bucket.of(storage, bucket).create(); * }
* *
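For reference, the setup and teardown flow that this doc snippet and the DeleteBucketTask change above describe can be sketched end to end. This is a minimal illustration assembled only from calls that appear in this patch (RemoteGcsHelper.create(), generateBucketName(), Bucket.of(storage, bucket).create(), and the list-then-delete loop); the class name is a placeholder.

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.testing.RemoteGcsHelper;

public class TestBucketSketch {

  public static void main(String... args) {
    // Bootstrap a Storage service and a uniquely named test bucket.
    RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
    Storage storage = gcsHelper.options().service();
    String bucket = RemoteGcsHelper.generateBucketName();
    Bucket.of(storage, bucket).create();

    // ... run the code under test against `storage` and `bucket` ...

    // Teardown mirrors DeleteBucketTask: delete the blobs first, then the bucket.
    for (Blob blob : storage.list(bucket).values()) {
      storage.delete(bucket, blob.name());
    }
    storage.delete(bucket);
  }
}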

After the test: diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java index 63972ff85dfd..3b303b8202a1 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java @@ -27,6 +27,8 @@ import com.google.gcloud.storage.Storage.BlobSourceOption; import com.google.gcloud.storage.Storage.BlobTargetOption; +import org.easymock.EasyMock; +import org.junit.Before; import org.junit.Test; import java.util.Iterator; @@ -34,14 +36,25 @@ public class BatchRequestTest { + private Storage storage; + + @Before + public void setUp() { + storage = EasyMock.createMock(Storage.class); + EasyMock.expect(storage.options()).andReturn(null).anyTimes(); + EasyMock.replay(storage); + } + @Test public void testBatchRequest() { BatchRequest request = BatchRequest.builder() .delete(BlobId.of("b1", "o1", 1L), BlobSourceOption.generationMatch()) .delete("b1", "o2", BlobSourceOption.generationMatch(1), BlobSourceOption.metagenerationMatch(2)) - .update(BlobInfo.builder("b2", "o1").build(), BlobTargetOption.predefinedAcl(PUBLIC_READ)) - .update(BlobInfo.builder("b2", "o2").build()) + .update( + Blob.builder(storage, "b2", "o1").build(), + BlobTargetOption.predefinedAcl(PUBLIC_READ)) + .update(Blob.builder(storage, "b2", "o2").build()) .get(BlobId.of("b3", "o1", 1L), BlobGetOption.generationMatch()) .get("b3", "o2", BlobGetOption.generationMatch(1)) .get("b3", "o3") @@ -61,15 +74,15 @@ public void testBatchRequest() { Iterables.get(delete.getValue(), 1, null)); assertFalse(deletes.hasNext()); - Iterator>> updates = request - .toUpdate().entrySet().iterator(); - Entry> update = updates.next(); - assertEquals(BlobInfo.builder("b2", "o1").build(), update.getKey()); + Iterator>> updates = + request.toUpdate().entrySet().iterator(); + Entry> update = updates.next(); + assertEquals(Blob.builder(storage, "b2", "o1").build(), update.getKey()); assertEquals(1, Iterables.size(update.getValue())); assertEquals(BlobTargetOption.predefinedAcl(PUBLIC_READ), Iterables.getFirst(update.getValue(), null)); update = updates.next(); - assertEquals(BlobInfo.builder("b2", "o2").build(), update.getKey()); + assertEquals(Blob.builder(storage, "b2", "o2").build(), update.getKey()); assertTrue(Iterables.isEmpty(update.getValue())); assertFalse(updates.hasNext()); @@ -93,40 +106,40 @@ public void testEquals() { BatchRequest request = BatchRequest.builder() .delete("b1", "o1") .delete("b1", "o2") - .update(BlobInfo.builder("b2", "o1").build()) - .update(BlobInfo.builder("b2", "o2").build()) + .update(Blob.builder(storage, "b2", "o1").build()) + .update(Blob.builder(storage, "b2", "o2").build()) .get("b3", "o1") .get("b3", "o2") .build(); BatchRequest requestEquals = BatchRequest.builder() .delete("b1", "o1") .delete("b1", "o2") - .update(BlobInfo.builder("b2", "o1").build()) - .update(BlobInfo.builder("b2", "o2").build()) + .update(Blob.builder(storage, "b2", "o1").build()) + .update(Blob.builder(storage, "b2", "o2").build()) .get("b3", "o1") .get("b3", "o2") .build(); BatchRequest requestNotEquals1 = BatchRequest.builder() .delete("b1", "o1") .delete("b1", "o3") - .update(BlobInfo.builder("b2", "o1").build()) - .update(BlobInfo.builder("b2", "o2").build()) + .update(Blob.builder(storage, "b2", "o1").build()) + .update(Blob.builder(storage, "b2", "o2").build()) .get("b3", "o1") .get("b3", "o2") .build(); 
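Since the batch surface switches from BlobInfo to Blob in this patch, a compact usage sketch may help. It uses only the builder calls shown in this test and the apply(BatchRequest) method from StorageImpl above; the bucket and object names are placeholders, and the Storage handle is passed in rather than asserting any particular bootstrap call.

import com.google.gcloud.storage.BatchRequest;
import com.google.gcloud.storage.BatchResponse;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;

public class BatchSketch {

  // Issues one batched call that deletes, updates, and fetches blobs together.
  static BatchResponse runBatch(Storage storage) {
    Blob toUpdate = Blob.builder(storage, "my-bucket", "notes.txt")
        .contentType("text/plain")
        .build();
    BatchRequest request = BatchRequest.builder()
        .delete("my-bucket", "stale.txt")
        .update(toUpdate)                          // updates now carry Blob, not BlobInfo
        .get("my-bucket", "report.csv")
        .build();
    BatchResponse response = storage.apply(request);
    System.out.printf("%d deletes, %d updates, %d gets%n",
        response.deletes().size(), response.updates().size(), response.gets().size());
    return response;
  }
}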
BatchRequest requestNotEquals2 = BatchRequest.builder() .delete("b1", "o1") .delete("b1", "o2") - .update(BlobInfo.builder("b2", "o1").build()) - .update(BlobInfo.builder("b2", "o3").build()) + .update(Blob.builder(storage, "b2", "o1").build()) + .update(Blob.builder(storage, "b2", "o3").build()) .get("b3", "o1") .get("b3", "o2") .build(); BatchRequest requestNotEquals3 = BatchRequest.builder() .delete("b1", "o1") .delete("b1", "o2") - .update(BlobInfo.builder("b2", "o1").build()) - .update(BlobInfo.builder("b2", "o2").build()) + .update(Blob.builder(storage, "b2", "o1").build()) + .update(Blob.builder(storage, "b2", "o2").build()) .get("b3", "o1") .get("b3", "o3") .build(); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java index 5985329e0183..d301615f4818 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java @@ -22,22 +22,35 @@ import com.google.common.collect.ImmutableList; import com.google.gcloud.storage.BatchResponse.Result; +import org.easymock.EasyMock; +import org.junit.Before; import org.junit.Test; import java.util.List; public class BatchResponseTest { - private static final BlobInfo BLOB_INFO_1 = BlobInfo.builder("b", "o1").build(); - private static final BlobInfo BLOB_INFO_2 = BlobInfo.builder("b", "o2").build(); - private static final BlobInfo BLOB_INFO_3 = BlobInfo.builder("b", "o3").build(); + private Storage storage; + private Blob blob1; + private Blob blob2; + private Blob blob3; + + @Before + public void setUp() { + storage = EasyMock.createMock(Storage.class); + EasyMock.expect(storage.options()).andReturn(null).anyTimes(); + EasyMock.replay(storage); + blob1 = Blob.builder(storage, "b", "o1").build(); + blob2 = Blob.builder(storage, "b", "o2").build(); + blob3 = Blob.builder(storage, "b", "o3").build(); + } + @Test public void testBatchResponse() { List> deletes = ImmutableList.of(Result.of(true), Result.of(false)); - List> updates = - ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); - List> gets = ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3)); + List> updates = ImmutableList.of(Result.of(blob1), Result.of(blob2)); + List> gets = ImmutableList.of(Result.of(blob2), Result.of(blob3)); BatchResponse response = new BatchResponse(deletes, updates, gets); assertEquals(deletes, response.deletes()); assertEquals(updates, response.updates()); @@ -47,14 +60,11 @@ public void testBatchResponse() { @Test public void testEquals() { List> deletes = ImmutableList.of(Result.of(true), Result.of(false)); - List> updates = - ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); - List> gets = ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3)); + List> updates = ImmutableList.of(Result.of(blob1), Result.of(blob2)); + List> gets = ImmutableList.of(Result.of(blob2), Result.of(blob3)); List> otherDeletes = ImmutableList.of(Result.of(false), Result.of(true)); - List> otherUpdates = - ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3)); - List> otherGets = - ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); + List> otherUpdates = ImmutableList.of(Result.of(blob2), Result.of(blob3)); + List> otherGets = ImmutableList.of(Result.of(blob1), Result.of(blob2)); BatchResponse response = new BatchResponse(deletes, updates, gets); BatchResponse 
responseEquals = new BatchResponse(deletes, updates, gets); BatchResponse responseNotEquals1 = new BatchResponse(otherDeletes, updates, gets); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java deleted file mode 100644 index 36b027dc7278..000000000000 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS; -import static com.google.gcloud.storage.Acl.Role.READER; -import static com.google.gcloud.storage.Acl.Role.WRITER; -import static org.junit.Assert.assertEquals; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.gcloud.storage.Acl.Project; -import com.google.gcloud.storage.Acl.User; - -import org.junit.Test; - -import java.util.List; -import java.util.Map; - -public class BlobInfoTest { - - private static final List ACL = ImmutableList.of( - new Acl(User.ofAllAuthenticatedUsers(), READER), - new Acl(new Project(VIEWERS, "p1"), WRITER)); - private static final Integer COMPONENT_COUNT = 2; - private static final String CONTENT_TYPE = "text/html"; - private static final String CACHE_CONTROL = "cache"; - private static final String CONTENT_DISPOSITION = "content-disposition"; - private static final String CONTENT_ENCODING = "UTF-8"; - private static final String CONTENT_LANGUAGE = "En"; - private static final String CRC32 = "0xFF00"; - private static final Long DELETE_TIME = System.currentTimeMillis(); - private static final String ETAG = "0xFF00"; - private static final Long GENERATION = 1L; - private static final String ID = "B/N:1"; - private static final String MD5 = "0xFF00"; - private static final String MEDIA_LINK = "http://media/b/n"; - private static final Map METADATA = ImmutableMap.of("n1", "v1", "n2", "v2"); - private static final Long META_GENERATION = 10L; - private static final User OWNER = new User("user@gmail.com"); - private static final String SELF_LINK = "http://storage/b/n"; - private static final Long SIZE = 1024L; - private static final Long UPDATE_TIME = DELETE_TIME - 1L; - private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n", GENERATION) - .acl(ACL) - .componentCount(COMPONENT_COUNT) - .contentType(CONTENT_TYPE) - .cacheControl(CACHE_CONTROL) - .contentDisposition(CONTENT_DISPOSITION) - .contentEncoding(CONTENT_ENCODING) - .contentLanguage(CONTENT_LANGUAGE) - .crc32c(CRC32) - .deleteTime(DELETE_TIME) - .etag(ETAG) - .id(ID) - .md5(MD5) - .mediaLink(MEDIA_LINK) - .metadata(METADATA) - .metageneration(META_GENERATION) - .owner(OWNER) - .selfLink(SELF_LINK) - .size(SIZE) - .updateTime(UPDATE_TIME) - .build(); - - @Test - public void testToBuilder() { - compareBlobs(BLOB_INFO, 
BLOB_INFO.toBuilder().build()); - BlobInfo blobInfo = BLOB_INFO.toBuilder().blobId(BlobId.of("b2", "n2")).size(200L).build(); - assertEquals("n2", blobInfo.name()); - assertEquals("b2", blobInfo.bucket()); - assertEquals(Long.valueOf(200), blobInfo.size()); - blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n", GENERATION)).size(SIZE).build(); - compareBlobs(BLOB_INFO, blobInfo); - } - - @Test - public void testToBuilderIncomplete() { - BlobInfo incompleteBlobInfo = BlobInfo.builder(BlobId.of("b2", "n2")).build(); - compareBlobs(incompleteBlobInfo, incompleteBlobInfo.toBuilder().build()); - } - - @Test - public void testBuilder() { - assertEquals("b", BLOB_INFO.bucket()); - assertEquals("n", BLOB_INFO.name()); - assertEquals(ACL, BLOB_INFO.acl()); - assertEquals(COMPONENT_COUNT, BLOB_INFO.componentCount()); - assertEquals(CONTENT_TYPE, BLOB_INFO.contentType()); - assertEquals(CACHE_CONTROL, BLOB_INFO.cacheControl()); - assertEquals(CONTENT_DISPOSITION, BLOB_INFO.contentDisposition()); - assertEquals(CONTENT_ENCODING, BLOB_INFO.contentEncoding()); - assertEquals(CONTENT_LANGUAGE, BLOB_INFO.contentLanguage()); - assertEquals(CRC32, BLOB_INFO.crc32c()); - assertEquals(DELETE_TIME, BLOB_INFO.deleteTime()); - assertEquals(ETAG, BLOB_INFO.etag()); - assertEquals(GENERATION, BLOB_INFO.generation()); - assertEquals(ID, BLOB_INFO.id()); - assertEquals(MD5, BLOB_INFO.md5()); - assertEquals(MEDIA_LINK, BLOB_INFO.mediaLink()); - assertEquals(METADATA, BLOB_INFO.metadata()); - assertEquals(META_GENERATION, BLOB_INFO.metageneration()); - assertEquals(OWNER, BLOB_INFO.owner()); - assertEquals(SELF_LINK, BLOB_INFO.selfLink()); - assertEquals(SIZE, BLOB_INFO.size()); - assertEquals(UPDATE_TIME, BLOB_INFO.updateTime()); - } - - private void compareBlobs(BlobInfo expected, BlobInfo value) { - assertEquals(expected, value); - assertEquals(expected.bucket(), value.bucket()); - assertEquals(expected.name(), value.name()); - assertEquals(expected.acl(), value.acl()); - assertEquals(expected.componentCount(), value.componentCount()); - assertEquals(expected.contentType(), value.contentType()); - assertEquals(expected.cacheControl(), value.cacheControl()); - assertEquals(expected.contentDisposition(), value.contentDisposition()); - assertEquals(expected.contentEncoding(), value.contentEncoding()); - assertEquals(expected.contentLanguage(), value.contentLanguage()); - assertEquals(expected.crc32c(), value.crc32c()); - assertEquals(expected.deleteTime(), value.deleteTime()); - assertEquals(expected.etag(), value.etag()); - assertEquals(expected.generation(), value.generation()); - assertEquals(expected.id(), value.id()); - assertEquals(expected.md5(), value.md5()); - assertEquals(expected.mediaLink(), value.mediaLink()); - assertEquals(expected.metadata(), value.metadata()); - assertEquals(expected.metageneration(), value.metageneration()); - assertEquals(expected.owner(), value.owner()); - assertEquals(expected.selfLink(), value.selfLink()); - assertEquals(expected.size(), value.size()); - assertEquals(expected.updateTime(), value.updateTime()); - } - - @Test - public void testToPbAndFromPb() { - compareBlobs(BLOB_INFO, BlobInfo.fromPb(BLOB_INFO.toPb())); - BlobInfo blobInfo = BlobInfo.builder(BlobId.of("b", "n")).build(); - compareBlobs(blobInfo, BlobInfo.fromPb(blobInfo.toPb())); - } - - @Test - public void testBlobId() { - assertEquals(BlobId.of("b", "n", GENERATION), BLOB_INFO.blobId()); - } -} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java 
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java index 586e7fd0fd39..eb5775f65f90 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java @@ -16,6 +16,9 @@ package com.google.gcloud.storage; +import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS; +import static com.google.gcloud.storage.Acl.Role.READER; +import static com.google.gcloud.storage.Acl.Role.WRITER; import static org.easymock.EasyMock.capture; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createStrictMock; @@ -31,7 +34,11 @@ import static org.junit.Assert.assertTrue; import com.google.api.client.util.Lists; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.gcloud.ReadChannel; +import com.google.gcloud.storage.Acl.Project; +import com.google.gcloud.storage.Acl.User; import com.google.gcloud.storage.Storage.CopyRequest; import org.easymock.Capture; @@ -42,23 +49,56 @@ import java.net.URL; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; public class BlobTest { - private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").metageneration(42L).build(); + private static final List ACL = ImmutableList.of( + new Acl(User.ofAllAuthenticatedUsers(), READER), new Acl(new Project(VIEWERS, "p1"), WRITER)); + private static final Integer COMPONENT_COUNT = 2; + private static final String CONTENT_TYPE = "text/html"; + private static final String CACHE_CONTROL = "cache"; + private static final String CONTENT_DISPOSITION = "content-disposition"; + private static final String CONTENT_ENCODING = "UTF-8"; + private static final String CONTENT_LANGUAGE = "En"; + private static final String CRC32 = "0xFF00"; + private static final Long DELETE_TIME = System.currentTimeMillis(); + private static final String ETAG = "0xFF00"; + private static final Long GENERATION = 1L; + private static final String ID = "B/N:1"; + private static final String MD5 = "0xFF00"; + private static final String MEDIA_LINK = "http://media/b/n"; + private static final Map METADATA = ImmutableMap.of("n1", "v1", "n2", "v2"); + private static final Long META_GENERATION = 10L; + private static final User OWNER = new User("user@gmail.com"); + private static final String SELF_LINK = "http://storage/b/n"; + private static final Long SIZE = 1024L; + private static final Long UPDATE_TIME = DELETE_TIME - 1L; private static final BlobId[] BLOB_ID_ARRAY = {BlobId.of("b1", "n1"), BlobId.of("b2", "n2"), BlobId.of("b3", "n3")}; - private static final BlobInfo[] BLOB_INFO_ARRAY = {BlobInfo.builder("b1", "n1").build(), - BlobInfo.builder("b2", "n2").build(), BlobInfo.builder("b3", "n3").build()}; + private static final StorageOptions STORAGE_OPTIONS = createMock(StorageOptions.class); + private Blob[] blobArray; + private Blob[] expectedBlobArray; private Storage storage; - private Blob blob; + private Storage simpleStorageMock; + private Blob fullBlob; + private Blob simpleBlob; + private Blob expectedSimpleBlob; @Before public void setUp() throws Exception { storage = createStrictMock(Storage.class); - blob = new Blob(storage, BLOB_INFO); + expect(storage.options()).andReturn(STORAGE_OPTIONS).anyTimes(); + simpleStorageMock = createStrictMock(Storage.class); + expect(simpleStorageMock.options()).andReturn(STORAGE_OPTIONS).anyTimes(); + replay(simpleStorageMock); + 
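The migrated tests all follow the mocking pattern visible in this setUp: because Blob and Bucket now carry a Storage reference, the Storage mock has to answer options() before any object is built. A condensed sketch of that pattern, with an illustrative test-class name, looks like this.

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import org.junit.Before;

public class SomeBlobTest {

  private Storage storage;
  private Blob blob;

  @Before
  public void setUp() {
    storage = createMock(Storage.class);
    // Stub options() so that comparisons such as compareBlobs() below, which
    // check storage().options(), have something consistent to read.
    expect(storage.options()).andReturn(createMock(StorageOptions.class)).anyTimes();
    replay(storage);
    blob = Blob.builder(storage, "b", "n").metageneration(42L).build();
  }
}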
expectedSimpleBlob = Blob.builder(simpleStorageMock, "b", "n").metageneration(42L).build(); + expectedBlobArray = new Blob[] { + Blob.builder(simpleStorageMock, "b1", "n1").build(), + Blob.builder(simpleStorageMock, "b2", "n2").build(), + Blob.builder(simpleStorageMock, "b3", "n3").build()}; } @After @@ -66,91 +106,190 @@ public void tearDown() throws Exception { verify(storage); } + private void initializeBlobs() { + fullBlob = + Blob.builder(storage, "b", "n", GENERATION) + .acl(ACL) + .componentCount(COMPONENT_COUNT) + .contentType(CONTENT_TYPE) + .cacheControl(CACHE_CONTROL) + .contentDisposition(CONTENT_DISPOSITION) + .contentEncoding(CONTENT_ENCODING) + .contentLanguage(CONTENT_LANGUAGE) + .crc32c(CRC32) + .deleteTime(DELETE_TIME) + .etag(ETAG) + .id(ID) + .md5(MD5) + .mediaLink(MEDIA_LINK) + .metadata(METADATA) + .metageneration(META_GENERATION) + .owner(OWNER) + .selfLink(SELF_LINK) + .size(SIZE) + .updateTime(UPDATE_TIME) + .build(); + simpleBlob = Blob.builder(storage, "b", "n").metageneration(42L).build(); + blobArray = new Blob[] {Blob.builder(storage, "b1", "n1").build(), + Blob.builder(storage, "b2", "n2").build(), Blob.builder(storage, "b3", "n3").build()}; + } + + @Test + public void testToBuilder() { + replay(storage); + initializeBlobs(); + compareBlobs(fullBlob, fullBlob.toBuilder().build()); + Blob blob = fullBlob.toBuilder().blobId(BlobId.of("b2", "n2")).size(200L).build(); + assertEquals("n2", blob.name()); + assertEquals("b2", blob.bucket()); + assertEquals(Long.valueOf(200), blob.size()); + blob = blob.toBuilder().blobId(BlobId.of("b", "n", GENERATION)).size(SIZE).build(); + compareBlobs(fullBlob, blob); + } + @Test - public void testInfo() throws Exception { - assertEquals(BLOB_INFO, blob.info()); + public void testToBuilderIncomplete() { replay(storage); + initializeBlobs(); + Blob incompleteBlob = Blob.builder(storage, BlobId.of("b2", "n2")).build(); + compareBlobs(incompleteBlob, incompleteBlob.toBuilder().build()); + } + + @Test + public void testBuilder() { + replay(storage); + initializeBlobs(); + assertEquals("b", fullBlob.bucket()); + assertEquals("n", fullBlob.name()); + assertEquals(ACL, fullBlob.acl()); + assertEquals(COMPONENT_COUNT, fullBlob.componentCount()); + assertEquals(CONTENT_TYPE, fullBlob.contentType()); + assertEquals(CACHE_CONTROL, fullBlob.cacheControl()); + assertEquals(CONTENT_DISPOSITION, fullBlob.contentDisposition()); + assertEquals(CONTENT_ENCODING, fullBlob.contentEncoding()); + assertEquals(CONTENT_LANGUAGE, fullBlob.contentLanguage()); + assertEquals(CRC32, fullBlob.crc32c()); + assertEquals(DELETE_TIME, fullBlob.deleteTime()); + assertEquals(ETAG, fullBlob.etag()); + assertEquals(GENERATION, fullBlob.generation()); + assertEquals(ID, fullBlob.id()); + assertEquals(MD5, fullBlob.md5()); + assertEquals(MEDIA_LINK, fullBlob.mediaLink()); + assertEquals(METADATA, fullBlob.metadata()); + assertEquals(META_GENERATION, fullBlob.metageneration()); + assertEquals(OWNER, fullBlob.owner()); + assertEquals(SELF_LINK, fullBlob.selfLink()); + assertEquals(SIZE, fullBlob.size()); + assertEquals(UPDATE_TIME, fullBlob.updateTime()); + assertEquals(STORAGE_OPTIONS, fullBlob.storage().options()); + } + + @Test + public void testToPbAndFromPb() { + replay(storage); + initializeBlobs(); + compareBlobs(fullBlob, Blob.fromPb(storage, fullBlob.toPb())); + Blob blob = Blob.builder(storage, BlobId.of("b", "n")).build(); + compareBlobs(blob, Blob.fromPb(storage, blob.toPb())); + } + + @Test + public void testBlobId() { + replay(storage); + 
initializeBlobs(); + assertEquals(BlobId.of("b", "n", GENERATION), fullBlob.blobId()); } @Test public void testExists_True() throws Exception { Storage.BlobGetOption[] expectedOptions = {Storage.BlobGetOption.fields()}; - expect(storage.get(BLOB_INFO.blobId(), expectedOptions)).andReturn(BLOB_INFO); + expect(storage.get(expectedSimpleBlob.blobId(), expectedOptions)).andReturn(expectedSimpleBlob); replay(storage); - assertTrue(blob.exists()); + initializeBlobs(); + assertTrue(simpleBlob.exists()); } @Test public void testExists_False() throws Exception { Storage.BlobGetOption[] expectedOptions = {Storage.BlobGetOption.fields()}; - expect(storage.get(BLOB_INFO.blobId(), expectedOptions)).andReturn(null); + expect(storage.get(expectedSimpleBlob.blobId(), expectedOptions)).andReturn(null); replay(storage); - assertFalse(blob.exists()); + initializeBlobs(); + assertFalse(simpleBlob.exists()); } @Test public void testContent() throws Exception { byte[] content = {1, 2}; - expect(storage.readAllBytes(BLOB_INFO.blobId())).andReturn(content); + expect(storage.readAllBytes(expectedSimpleBlob.blobId())).andReturn(content); replay(storage); - assertArrayEquals(content, blob.content()); + initializeBlobs(); + assertArrayEquals(content, simpleBlob.content()); } @Test public void testReload() throws Exception { - BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build(); - expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(updatedInfo); + Blob expected = expectedSimpleBlob.toBuilder().cacheControl("c").build(); + expect(storage.get(expectedSimpleBlob.blobId(), new Storage.BlobGetOption[0])) + .andReturn(expected); replay(storage); - Blob updatedBlob = blob.reload(); - assertSame(storage, updatedBlob.storage()); - assertEquals(updatedInfo, updatedBlob.info()); + initializeBlobs(); + Blob updatedBlob = simpleBlob.reload(); + assertSame(simpleStorageMock, updatedBlob.storage()); + assertEquals(expected, updatedBlob); } @Test public void testReloadNull() throws Exception { - expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null); + expect(storage.get(expectedSimpleBlob.blobId(), new Storage.BlobGetOption[0])).andReturn(null); replay(storage); - assertNull(blob.reload()); + initializeBlobs(); + assertNull(simpleBlob.reload()); } @Test public void testReloadWithOptions() throws Exception { - BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build(); + Blob expected = expectedSimpleBlob.toBuilder().cacheControl("c").build(); Storage.BlobGetOption[] options = {Storage.BlobGetOption.metagenerationMatch(42L)}; - expect(storage.get(BLOB_INFO.blobId(), options)).andReturn(updatedInfo); + expect(storage.get(expectedSimpleBlob.blobId(), options)).andReturn(expected); replay(storage); - Blob updatedBlob = blob.reload(Blob.BlobSourceOption.metagenerationMatch()); - assertSame(storage, updatedBlob.storage()); - assertEquals(updatedInfo, updatedBlob.info()); + initializeBlobs(); + Blob updatedBlob = simpleBlob.reload(Blob.BlobSourceOption.metagenerationMatch()); + assertSame(simpleStorageMock, updatedBlob.storage()); + assertEquals(expected, updatedBlob); } @Test public void testUpdate() throws Exception { - BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build(); - expect(storage.update(updatedInfo, new Storage.BlobTargetOption[0])).andReturn(updatedInfo); + Blob expected = expectedSimpleBlob.toBuilder().cacheControl("c").build(); + expect(storage.update(expected, new Storage.BlobTargetOption[0])).andReturn(expected); 
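Outside of mocks, the reload and update round trip that these expectations model looks roughly like the following against a live service. Blob.get, reload, toBuilder, and update are used exactly as the tests expect them; the bucket and object names are parameters, and the Storage handle is passed in rather than constructed here.

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;

public class ReloadUpdateSketch {

  static Blob refreshAndRetag(Storage storage, String bucket, String name) {
    Blob blob = Blob.get(storage, bucket, name);
    if (blob == null) {
      return null; // no such object
    }
    // Re-fetch metadata, failing if someone changed it since this Blob was read.
    Blob fresh = blob.reload(Blob.BlobSourceOption.metagenerationMatch());
    if (fresh == null) {
      return null; // deleted concurrently
    }
    // Push a metadata change; update() returns the Blob as stored after the patch.
    return blob.update(fresh.toBuilder().cacheControl("no-cache").build());
  }
}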
replay(storage); - Blob updatedBlob = blob.update(updatedInfo); - assertSame(storage, blob.storage()); - assertEquals(updatedInfo, updatedBlob.info()); + initializeBlobs(); + Blob updatedBlob = simpleBlob.update(expected); + assertSame(storage, simpleBlob.storage()); + assertEquals(expected, updatedBlob); } @Test public void testDelete() throws Exception { - expect(storage.delete(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(true); + expect(storage.delete(expectedSimpleBlob.blobId(), new Storage.BlobSourceOption[0])) + .andReturn(true); replay(storage); - assertTrue(blob.delete()); + initializeBlobs(); + assertTrue(simpleBlob.delete()); } @Test public void testCopyToBucket() throws Exception { - BlobInfo target = BlobInfo.builder(BlobId.of("bt", "n")).build(); CopyWriter copyWriter = createMock(CopyWriter.class); Capture capturedCopyRequest = Capture.newInstance(); expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); replay(storage); - CopyWriter returnedCopyWriter = blob.copyTo("bt"); + initializeBlobs(); + Blob target = Blob.builder(storage, BlobId.of("bt", "n")).build(); + CopyWriter returnedCopyWriter = simpleBlob.copyTo("bt"); assertEquals(copyWriter, returnedCopyWriter); - assertEquals(capturedCopyRequest.getValue().source(), blob.id()); + assertEquals(capturedCopyRequest.getValue().source(), simpleBlob.blobId()); assertEquals(capturedCopyRequest.getValue().target(), target); assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); @@ -158,14 +297,15 @@ public void testCopyToBucket() throws Exception { @Test public void testCopyTo() throws Exception { - BlobInfo target = BlobInfo.builder(BlobId.of("bt", "nt")).build(); CopyWriter copyWriter = createMock(CopyWriter.class); Capture capturedCopyRequest = Capture.newInstance(); expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); replay(storage); - CopyWriter returnedCopyWriter = blob.copyTo("bt", "nt"); + initializeBlobs(); + Blob target = Blob.builder(storage, BlobId.of("bt", "nt")).build(); + CopyWriter returnedCopyWriter = simpleBlob.copyTo("bt", "nt"); assertEquals(copyWriter, returnedCopyWriter); - assertEquals(capturedCopyRequest.getValue().source(), blob.id()); + assertEquals(capturedCopyRequest.getValue().source(), simpleBlob.blobId()); assertEquals(capturedCopyRequest.getValue().target(), target); assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); @@ -175,13 +315,14 @@ public void testCopyTo() throws Exception { public void testCopyToBlobId() throws Exception { BlobId targetId = BlobId.of("bt", "nt"); CopyWriter copyWriter = createMock(CopyWriter.class); - BlobInfo target = BlobInfo.builder(targetId).build(); Capture capturedCopyRequest = Capture.newInstance(); expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); replay(storage); - CopyWriter returnedCopyWriter = blob.copyTo(targetId); + initializeBlobs(); + Blob target = Blob.builder(storage, targetId).build(); + CopyWriter returnedCopyWriter = simpleBlob.copyTo(targetId); assertEquals(copyWriter, returnedCopyWriter); - assertEquals(capturedCopyRequest.getValue().source(), blob.id()); + assertEquals(capturedCopyRequest.getValue().source(), simpleBlob.blobId()); assertEquals(capturedCopyRequest.getValue().target(), target); assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); 
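The copy tests only assert on the captured CopyRequest, so for orientation here is the corresponding caller-side sketch: copyTo(bucket, name) builds the request with this blob as source and returns the CopyWriter produced by storage.copy(), and delete() is the instance-level form covered above. Names are placeholders and the Storage handle is assumed to be provided.

import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.CopyWriter;
import com.google.gcloud.storage.Storage;

public class CopySketch {

  static void backUpAndRemove(Storage storage) {
    Blob source = Blob.get(storage, "my-bucket", "notes.txt");
    if (source == null) {
      return; // nothing to back up
    }
    // Start a server-side copy into another bucket; the CopyWriter tracks the operation.
    CopyWriter copyWriter = source.copyTo("backup-bucket", "notes-copy.txt");
    System.out.println("copy requested: " + copyWriter);
    // Remove the original through the same instance API the tests exercise.
    boolean deleted = source.delete();
    System.out.println("source deleted: " + deleted);
  }
}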
assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); @@ -189,62 +330,69 @@ public void testCopyToBlobId() throws Exception { @Test public void testReader() throws Exception { - ReadChannel channel = createMock(ReadChannel.class); - expect(storage.reader(BLOB_INFO.blobId())).andReturn(channel); + ReadChannel channel = createMock(BlobReadChannel.class); + expect(storage.reader(expectedSimpleBlob.blobId())).andReturn(channel); replay(storage); - assertSame(channel, blob.reader()); + initializeBlobs(); + assertSame(channel, simpleBlob.reader()); } @Test public void testWriter() throws Exception { BlobWriteChannel channel = createMock(BlobWriteChannel.class); - expect(storage.writer(BLOB_INFO)).andReturn(channel); + expect(storage.writer(expectedSimpleBlob)).andReturn(channel); replay(storage); - assertSame(channel, blob.writer()); + initializeBlobs(); + assertSame(channel, simpleBlob.writer()); } @Test public void testSignUrl() throws Exception { URL url = new URL("http://localhost:123/bla"); - expect(storage.signUrl(BLOB_INFO, 100, TimeUnit.SECONDS)).andReturn(url); + expect(storage.signUrl(expectedSimpleBlob, 100, TimeUnit.SECONDS)).andReturn(url); replay(storage); - assertEquals(url, blob.signUrl(100, TimeUnit.SECONDS)); + initializeBlobs(); + assertEquals(url, simpleBlob.signUrl(100, TimeUnit.SECONDS)); } @Test public void testGetSome() throws Exception { - List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY); - expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList); + List blobList = Arrays.asList(expectedBlobArray); + expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobList); replay(storage); + initializeBlobs(); List result = Blob.get(storage, BLOB_ID_ARRAY[0], BLOB_ID_ARRAY[1], BLOB_ID_ARRAY[2]); - assertEquals(blobInfoList.size(), result.size()); - for (int i = 0; i < blobInfoList.size(); i++) { - assertEquals(blobInfoList.get(i), result.get(i).info()); + assertEquals(blobList.size(), result.size()); + for (int i = 0; i < blobList.size(); i++) { + assertEquals(blobList.get(i), result.get(i)); } } @Test public void testGetSomeList() throws Exception { - List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY); - expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList); + List blobList = Arrays.asList(expectedBlobArray); + expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobList); replay(storage); + initializeBlobs(); List result = Blob.get(storage, Arrays.asList(BLOB_ID_ARRAY)); - assertEquals(blobInfoList.size(), result.size()); - for (int i = 0; i < blobInfoList.size(); i++) { - assertEquals(blobInfoList.get(i), result.get(i).info()); + assertEquals(blobList.size(), result.size()); + for (int i = 0; i < blobList.size(); i++) { + assertEquals(blobList.get(i), result.get(i)); } } @Test public void testGetSomeNull() throws Exception { - List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY[0], null, BLOB_INFO_ARRAY[2]); - expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList); + List blobList = + Arrays.asList(expectedBlobArray[0], null, expectedBlobArray[2]); + expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobList); replay(storage); + initializeBlobs(); List result = Blob.get(storage, BLOB_ID_ARRAY[0], BLOB_ID_ARRAY[1], BLOB_ID_ARRAY[2]); - assertEquals(blobInfoList.size(), result.size()); - for (int i = 0; i < blobInfoList.size(); i++) { - if (blobInfoList.get(i) != null) { - assertEquals(blobInfoList.get(i), result.get(i).info()); + assertEquals(blobList.size(), result.size()); + for (int i = 0; i < blobList.size(); i++) { + if (blobList.get(i) != null) { + 
assertEquals(blobList.get(i), result.get(i)); } else { assertNull(result.get(i)); } @@ -259,31 +407,33 @@ public void testUpdateNone() throws Exception { @Test public void testUpdateSome() throws Exception { - List blobInfoList = Lists.newArrayListWithCapacity(BLOB_ID_ARRAY.length); - for (BlobInfo info : BLOB_INFO_ARRAY) { - blobInfoList.add(info.toBuilder().contentType("content").build()); + List blobList = Lists.newArrayListWithCapacity(BLOB_ID_ARRAY.length); + for (Blob blob : expectedBlobArray) { + blobList.add(blob.toBuilder().contentType("content").build()); } - expect(storage.update(BLOB_INFO_ARRAY)).andReturn(blobInfoList); + expect(storage.update(expectedBlobArray)).andReturn(blobList); replay(storage); - List result = Blob.update(storage, BLOB_INFO_ARRAY); - assertEquals(blobInfoList.size(), result.size()); - for (int i = 0; i < blobInfoList.size(); i++) { - assertEquals(blobInfoList.get(i), result.get(i).info()); + initializeBlobs(); + List result = Blob.update(storage, blobArray); + assertEquals(blobList.size(), result.size()); + for (int i = 0; i < blobList.size(); i++) { + assertEquals(blobList.get(i), result.get(i)); } } @Test public void testUpdateSomeNull() throws Exception { - List blobInfoList = Arrays.asList( - BLOB_INFO_ARRAY[0].toBuilder().contentType("content").build(), null, - BLOB_INFO_ARRAY[2].toBuilder().contentType("content").build()); - expect(storage.update(BLOB_INFO_ARRAY)).andReturn(blobInfoList); - replay(storage); - List result = Blob.update(storage, BLOB_INFO_ARRAY); - assertEquals(blobInfoList.size(), result.size()); - for (int i = 0; i < blobInfoList.size(); i++) { - if (blobInfoList.get(i) != null) { - assertEquals(blobInfoList.get(i), result.get(i).info()); + List blobList = + Arrays.asList(expectedBlobArray[0].toBuilder().contentType("content").build(), null, + expectedBlobArray[2].toBuilder().contentType("content").build()); + expect(storage.update(expectedBlobArray)).andReturn(blobList); + replay(storage); + initializeBlobs(); + List result = Blob.update(storage, blobArray); + assertEquals(blobList.size(), result.size()); + for (int i = 0; i < blobList.size(); i++) { + if (blobList.get(i) != null) { + assertEquals(blobList.get(i), result.get(i)); } else { assertNull(result.get(i)); } @@ -293,6 +443,7 @@ public void testUpdateSomeNull() throws Exception { @Test public void testDeleteNone() throws Exception { replay(storage); + initializeBlobs(); assertTrue(Blob.delete(storage).isEmpty()); } @@ -301,6 +452,7 @@ public void testDeleteSome() throws Exception { List deleteResult = Arrays.asList(true, true, true); expect(storage.delete(BLOB_ID_ARRAY)).andReturn(deleteResult); replay(storage); + initializeBlobs(); List result = Blob.delete(storage, BLOB_ID_ARRAY); assertEquals(deleteResult.size(), result.size()); for (int i = 0; i < deleteResult.size(); i++) { @@ -310,53 +462,88 @@ public void testDeleteSome() throws Exception { @Test public void testGetFromString() throws Exception { - expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(BLOB_INFO); + expect(storage.get(expectedSimpleBlob.blobId(), new Storage.BlobGetOption[0])) + .andReturn(expectedSimpleBlob); replay(storage); - Blob loadedBlob = Blob.get(storage, BLOB_INFO.bucket(), BLOB_INFO.name()); - assertEquals(BLOB_INFO, loadedBlob.info()); + initializeBlobs(); + Blob loadedBlob = Blob.get(storage, simpleBlob.bucket(), simpleBlob.name()); + assertEquals(simpleBlob, loadedBlob); } @Test public void testGetFromId() throws Exception { - 
expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(BLOB_INFO); + expect(storage.get(expectedSimpleBlob.blobId(), new Storage.BlobGetOption[0])) + .andReturn(expectedSimpleBlob); replay(storage); - Blob loadedBlob = Blob.get(storage, BLOB_INFO.blobId()); + initializeBlobs(); + Blob loadedBlob = Blob.get(storage, simpleBlob.blobId()); assertNotNull(loadedBlob); - assertEquals(BLOB_INFO, loadedBlob.info()); + assertEquals(simpleBlob, loadedBlob); } @Test public void testGetFromStringNull() throws Exception { - expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null); + expect(storage.get(expectedSimpleBlob.blobId(), new Storage.BlobGetOption[0])).andReturn(null); replay(storage); - assertNull(Blob.get(storage, BLOB_INFO.bucket(), BLOB_INFO.name())); + initializeBlobs(); + assertNull(Blob.get(storage, simpleBlob.bucket(), simpleBlob.name())); } @Test public void testGetFromIdNull() throws Exception { - expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null); + expect(storage.get(expectedSimpleBlob.blobId(), new Storage.BlobGetOption[0])).andReturn(null); replay(storage); - assertNull(Blob.get(storage, BLOB_INFO.blobId())); + initializeBlobs(); + assertNull(Blob.get(storage, simpleBlob.blobId())); } @Test public void testGetFromStringWithOptions() throws Exception { - expect(storage.get(BLOB_INFO.blobId(), Storage.BlobGetOption.generationMatch(42L))) - .andReturn(BLOB_INFO); + expect(storage.get(expectedSimpleBlob.blobId(), Storage.BlobGetOption.generationMatch(42L))) + .andReturn(expectedSimpleBlob); replay(storage); - Blob loadedBlob = Blob.get(storage, BLOB_INFO.bucket(), BLOB_INFO.name(), + initializeBlobs(); + Blob loadedBlob = Blob.get(storage, simpleBlob.bucket(), simpleBlob.name(), Storage.BlobGetOption.generationMatch(42L)); - assertEquals(BLOB_INFO, loadedBlob.info()); + assertEquals(simpleBlob, loadedBlob); } @Test public void testGetFromIdWithOptions() throws Exception { - expect(storage.get(BLOB_INFO.blobId(), Storage.BlobGetOption.generationMatch(42L))) - .andReturn(BLOB_INFO); + expect(storage.get(expectedSimpleBlob.blobId(), Storage.BlobGetOption.generationMatch(42L))) + .andReturn(expectedSimpleBlob); replay(storage); + initializeBlobs(); Blob loadedBlob = - Blob.get(storage, BLOB_INFO.blobId(), Storage.BlobGetOption.generationMatch(42L)); + Blob.get(storage, simpleBlob.blobId(), Storage.BlobGetOption.generationMatch(42L)); assertNotNull(loadedBlob); - assertEquals(BLOB_INFO, loadedBlob.info()); + assertEquals(simpleBlob, loadedBlob); + } + + private void compareBlobs(Blob expected, Blob value) { + assertEquals(expected, value); + assertEquals(expected.bucket(), value.bucket()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.acl(), value.acl()); + assertEquals(expected.componentCount(), value.componentCount()); + assertEquals(expected.contentType(), value.contentType()); + assertEquals(expected.cacheControl(), value.cacheControl()); + assertEquals(expected.contentDisposition(), value.contentDisposition()); + assertEquals(expected.contentEncoding(), value.contentEncoding()); + assertEquals(expected.contentLanguage(), value.contentLanguage()); + assertEquals(expected.crc32c(), value.crc32c()); + assertEquals(expected.deleteTime(), value.deleteTime()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.generation(), value.generation()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.md5(), value.md5()); + 
assertEquals(expected.mediaLink(), value.mediaLink()); + assertEquals(expected.metadata(), value.metadata()); + assertEquals(expected.metageneration(), value.metageneration()); + assertEquals(expected.owner(), value.owner()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.size(), value.size()); + assertEquals(expected.updateTime(), value.updateTime()); + assertEquals(expected.storage().options(), value.storage().options()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java index e499f6b9de52..688a4732060c 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java @@ -54,7 +54,6 @@ public class BlobWriteChannelTest { private static final String BUCKET_NAME = "b"; private static final String BLOB_NAME = "n"; private static final String UPLOAD_ID = "uploadid"; - private static final BlobInfo BLOB_INFO = BlobInfo.builder(BUCKET_NAME, BLOB_NAME).build(); private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); private static final int MIN_CHUNK_SIZE = 256 * 1024; private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; @@ -65,6 +64,7 @@ public class BlobWriteChannelTest { private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; private BlobWriteChannel writer; + private Blob blob; @Before public void setUp() { @@ -78,6 +78,7 @@ public void setUp() { .serviceRpcFactory(rpcFactoryMock) .retryParams(RetryParams.noRetries()) .build(); + blob = Blob.builder(options.service(), BUCKET_NAME, BLOB_NAME).build(); } @After @@ -87,28 +88,28 @@ public void tearDown() throws Exception { @Test public void testCreate() { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); assertTrue(writer.isOpen()); } @Test public void testWriteWithoutFlush() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); } @Test public void testWriteWithFlush() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(CUSTOM_CHUNK_SIZE), eq(false)); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); writer.chunkSize(CUSTOM_CHUNK_SIZE); ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE); assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); @@ -117,12 +118,12 @@ public void testWriteWithFlush() throws IOException { @Test public void testWritesAndFlush() throws IOException { - 
expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(DEFAULT_CHUNK_SIZE), eq(false)); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; for (int i = 0; i < buffers.length; i++) { buffers[i] = randomBuffer(MIN_CHUNK_SIZE); @@ -138,11 +139,11 @@ public void testWritesAndFlush() throws IOException { @Test public void testCloseWithoutFlush() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); assertTrue(writer.isOpen()); writer.close(); assertArrayEquals(new byte[0], capturedBuffer.getValue()); @@ -151,13 +152,13 @@ public void testCloseWithoutFlush() throws IOException { @Test public void testCloseWithFlush() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(); ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); assertTrue(writer.isOpen()); writer.write(buffer); writer.close(); @@ -168,11 +169,11 @@ public void testCloseWithFlush() throws IOException { @Test public void testWriteClosed() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); writer.close(); try { writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); @@ -184,7 +185,7 @@ public void testWriteClosed() throws IOException { @Test public void testSaveAndRestore() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(CaptureType.ALL); Capture capturedPosition = Capture.newInstance(CaptureType.ALL); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), @@ -193,7 +194,7 @@ public void testSaveAndRestore() throws IOException { replay(storageRpcMock); ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + 
writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); assertEquals(new Long(0L), capturedPosition.getValues().get(0)); @@ -206,15 +207,15 @@ public void testSaveAndRestore() throws IOException { @Test public void testSaveAndRestoreClosed() throws IOException { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); Capture capturedBuffer = Capture.newInstance(); storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); writer.close(); RestorableState writerState = writer.capture(); RestorableState expectedWriterState = - BlobWriteChannel.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID) + BlobWriteChannel.StateImpl.builder(blob, UPLOAD_ID) .buffer(null) .chunkSize(DEFAULT_CHUNK_SIZE) .isOpen(false) @@ -227,12 +228,12 @@ public void testSaveAndRestoreClosed() throws IOException { @Test public void testStateEquals() { - expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID).times(2); + expect(storageRpcMock.open(blob.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID).times(2); replay(storageRpcMock); - writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); // avoid closing when you don't want partial writes to GCS upon failure @SuppressWarnings("resource") - WriteChannel writer2 = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + WriteChannel writer2 = new BlobWriteChannel(blob, EMPTY_RPC_OPTIONS); RestorableState state = writer.capture(); RestorableState state2 = writer2.capture(); assertEquals(state, state2); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java deleted file mode 100644 index b705685a04b1..000000000000 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import com.google.api.services.storage.model.Bucket.Lifecycle.Rule; -import com.google.common.collect.ImmutableList; -import com.google.gcloud.storage.Acl.Project; -import com.google.gcloud.storage.Acl.Role; -import com.google.gcloud.storage.Acl.User; -import com.google.gcloud.storage.BucketInfo.AgeDeleteRule; -import com.google.gcloud.storage.BucketInfo.CreatedBeforeDeleteRule; -import com.google.gcloud.storage.BucketInfo.DeleteRule; -import com.google.gcloud.storage.BucketInfo.DeleteRule.Type; -import com.google.gcloud.storage.BucketInfo.IsLiveDeleteRule; -import com.google.gcloud.storage.BucketInfo.NumNewerVersionsDeleteRule; -import com.google.gcloud.storage.BucketInfo.RawDeleteRule; - -import org.junit.Test; - -import java.util.Collections; -import java.util.List; - -public class BucketInfoTest { - - private static final List ACL = ImmutableList.of( - new Acl(User.ofAllAuthenticatedUsers(), Role.READER), - new Acl(new Project(VIEWERS, "p1"), Role.WRITER)); - private static final String ETAG = "0xFF00"; - private static final String ID = "B/N:1"; - private static final Long META_GENERATION = 10L; - private static final User OWNER = new User("user@gmail.com"); - private static final String SELF_LINK = "http://storage/b/n"; - private static final Long CREATE_TIME = System.currentTimeMillis(); - private static final List CORS = Collections.singletonList(Cors.builder().build()); - private static final List DEFAULT_ACL = - Collections.singletonList(new Acl(User.ofAllAuthenticatedUsers(), Role.WRITER)); - private static final List DELETE_RULES = - Collections.singletonList(new AgeDeleteRule(5)); - private static final String INDEX_PAGE = "index.html"; - private static final String NOT_FOUND_PAGE = "error.html"; - private static final String LOCATION = "ASIA"; - private static final String STORAGE_CLASS = "STANDARD"; - private static final Boolean VERSIONING_ENABLED = true; - private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b") - .acl(ACL) - .etag(ETAG) - .id(ID) - .metageneration(META_GENERATION) - .owner(OWNER) - .selfLink(SELF_LINK) - .cors(CORS) - .createTime(CREATE_TIME) - .defaultAcl(DEFAULT_ACL) - .deleteRules(DELETE_RULES) - .indexPage(INDEX_PAGE) - .notFoundPage(NOT_FOUND_PAGE) - .location(LOCATION) - .storageClass(STORAGE_CLASS) - .versioningEnabled(VERSIONING_ENABLED) - .build(); - - @Test - public void testToBuilder() { - compareBuckets(BUCKET_INFO, BUCKET_INFO.toBuilder().build()); - BucketInfo bucketInfo = BUCKET_INFO.toBuilder().name("B").id("id").build(); - assertEquals("B", bucketInfo.name()); - assertEquals("id", bucketInfo.id()); - bucketInfo = bucketInfo.toBuilder().name("b").id(ID).build(); - compareBuckets(BUCKET_INFO, bucketInfo); - } - - @Test - public void testToBuilderIncomplete() { - BucketInfo incompleteBucketInfo = BucketInfo.builder("b").build(); - compareBuckets(incompleteBucketInfo, incompleteBucketInfo.toBuilder().build()); - } - - @Test - public void testOf() { - BucketInfo bucketInfo = BucketInfo.of("bucket"); - assertEquals("bucket", bucketInfo.name()); - } - - @Test - public void testBuilder() { - assertEquals("b", BUCKET_INFO.name()); - assertEquals(ACL, BUCKET_INFO.acl()); - assertEquals(ETAG, BUCKET_INFO.etag()); - assertEquals(ID, BUCKET_INFO.id()); - assertEquals(META_GENERATION, BUCKET_INFO.metageneration()); - 
assertEquals(OWNER, BUCKET_INFO.owner()); - assertEquals(SELF_LINK, BUCKET_INFO.selfLink()); - assertEquals(CREATE_TIME, BUCKET_INFO.createTime()); - assertEquals(CORS, BUCKET_INFO.cors()); - assertEquals(DEFAULT_ACL, BUCKET_INFO.defaultAcl()); - assertEquals(DELETE_RULES, BUCKET_INFO.deleteRules()); - assertEquals(INDEX_PAGE, BUCKET_INFO.indexPage()); - assertEquals(NOT_FOUND_PAGE, BUCKET_INFO.notFoundPage()); - assertEquals(LOCATION, BUCKET_INFO.location()); - assertEquals(STORAGE_CLASS, BUCKET_INFO.storageClass()); - assertEquals(VERSIONING_ENABLED, BUCKET_INFO.versioningEnabled()); - } - - @Test - public void testToPbAndFromPb() { - compareBuckets(BUCKET_INFO, BucketInfo.fromPb(BUCKET_INFO.toPb())); - BucketInfo bucketInfo = BucketInfo.of("b"); - compareBuckets(bucketInfo, BucketInfo.fromPb(bucketInfo.toPb())); - } - - private void compareBuckets(BucketInfo expected, BucketInfo value) { - assertEquals(expected, value); - assertEquals(expected.name(), value.name()); - assertEquals(expected.acl(), value.acl()); - assertEquals(expected.etag(), value.etag()); - assertEquals(expected.id(), value.id()); - assertEquals(expected.metageneration(), value.metageneration()); - assertEquals(expected.owner(), value.owner()); - assertEquals(expected.selfLink(), value.selfLink()); - assertEquals(expected.createTime(), value.createTime()); - assertEquals(expected.cors(), value.cors()); - assertEquals(expected.defaultAcl(), value.defaultAcl()); - assertEquals(expected.deleteRules(), value.deleteRules()); - assertEquals(expected.indexPage(), value.indexPage()); - assertEquals(expected.notFoundPage(), value.notFoundPage()); - assertEquals(expected.location(), value.location()); - assertEquals(expected.storageClass(), value.storageClass()); - assertEquals(expected.versioningEnabled(), value.versioningEnabled()); - } - - @Test - public void testDeleteRules() { - AgeDeleteRule ageRule = new AgeDeleteRule(10); - assertEquals(10, ageRule.daysToLive()); - assertEquals(Type.AGE, ageRule.type()); - CreatedBeforeDeleteRule createBeforeRule = new CreatedBeforeDeleteRule(1); - assertEquals(1, createBeforeRule.timeMillis()); - assertEquals(Type.CREATE_BEFORE, createBeforeRule.type()); - NumNewerVersionsDeleteRule versionsRule = new NumNewerVersionsDeleteRule(2); - assertEquals(2, versionsRule.numNewerVersions()); - assertEquals(Type.NUM_NEWER_VERSIONS, versionsRule.type()); - IsLiveDeleteRule isLiveRule = new IsLiveDeleteRule(true); - assertTrue(isLiveRule.isLive()); - assertEquals(Type.IS_LIVE, isLiveRule.type()); - Rule rule = new Rule().set("a", "b"); - RawDeleteRule rawRule = new RawDeleteRule(rule); - assertEquals(Type.UNKNOWN, rawRule.type()); - ImmutableList rules = ImmutableList - .of(ageRule, createBeforeRule, versionsRule, isLiveRule, rawRule); - for (DeleteRule delRule : rules) { - assertEquals(delRule, DeleteRule.fromPb(delRule.toPb())); - } - } -} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java index e67e7aff17dc..de0b70e64f3f 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java @@ -16,6 +16,7 @@ package com.google.gcloud.storage; +import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS; import static org.easymock.EasyMock.capture; import static org.easymock.EasyMock.createStrictMock; import static org.easymock.EasyMock.expect; @@ -28,12 +29,24 @@ import 
static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; +import com.google.api.services.storage.model.Bucket.Lifecycle.Rule; import com.google.common.collect.ImmutableList; import com.google.gcloud.Page; import com.google.gcloud.PageImpl; +import com.google.gcloud.storage.Acl.Project; +import com.google.gcloud.storage.Acl.Role; +import com.google.gcloud.storage.Acl.User; import com.google.gcloud.storage.BatchResponse.Result; +import com.google.gcloud.storage.Bucket.AgeDeleteRule; +import com.google.gcloud.storage.Bucket.CreatedBeforeDeleteRule; +import com.google.gcloud.storage.Bucket.DeleteRule; +import com.google.gcloud.storage.Bucket.DeleteRule.Type; +import com.google.gcloud.storage.Bucket.IsLiveDeleteRule; +import com.google.gcloud.storage.Bucket.NumNewerVersionsDeleteRule; +import com.google.gcloud.storage.Bucket.RawDeleteRule; import org.easymock.Capture; +import org.easymock.EasyMock; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -48,20 +61,48 @@ public class BucketTest { - private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b").metageneration(42L).build(); - private static final Iterable BLOB_INFO_RESULTS = ImmutableList.of( - BlobInfo.builder("b", "n1").build(), - BlobInfo.builder("b", "n2").build(), - BlobInfo.builder("b", "n3").build()); +private static final List ACL = ImmutableList.of( + new Acl(User.ofAllAuthenticatedUsers(), Role.READER), + new Acl(new Project(VIEWERS, "p1"), Role.WRITER)); + private static final String ETAG = "0xFF00"; + private static final String ID = "B/N:1"; + private static final Long META_GENERATION = 10L; + private static final User OWNER = new User("user@gmail.com"); + private static final String SELF_LINK = "http://storage/b/n"; + private static final Long CREATE_TIME = System.currentTimeMillis(); + private static final List CORS = Collections.singletonList(Cors.builder().build()); + private static final List DEFAULT_ACL = + Collections.singletonList(new Acl(User.ofAllAuthenticatedUsers(), Role.WRITER)); + private static final List DELETE_RULES = + Collections.singletonList(new AgeDeleteRule(5)); + private static final String INDEX_PAGE = "index.html"; + private static final String NOT_FOUND_PAGE = "error.html"; + private static final String LOCATION = "ASIA"; + private static final String STORAGE_CLASS = "STANDARD"; + private static final Boolean VERSIONING_ENABLED = true; private static final String CONTENT_TYPE = "text/plain"; + private static final StorageOptions STORAGE_OPTIONS = EasyMock.createMock(StorageOptions.class); + private Bucket fullBucket; + private Bucket simpleBucket; + private Bucket expectedSimpleBucket; + private Iterable blobResults; + private Iterable expectedBlobResults; private Storage storage; - private Bucket bucket; + private Storage simpleStorageMock; @Before public void setUp() throws Exception { storage = createStrictMock(Storage.class); - bucket = new Bucket(storage, BUCKET_INFO); + expect(storage.options()).andReturn(STORAGE_OPTIONS).anyTimes(); + simpleStorageMock = createStrictMock(Storage.class); + expect(simpleStorageMock.options()).andReturn(STORAGE_OPTIONS).anyTimes(); + replay(simpleStorageMock); + expectedSimpleBucket = Bucket.builder(simpleStorageMock, "b").metageneration(42L).build(); + expectedBlobResults = ImmutableList.of( + Blob.builder(simpleStorageMock, "b", "n1").build(), + Blob.builder(simpleStorageMock, "b", "n2").build(), + Blob.builder(simpleStorageMock, "b", "n3").build()); } @After @@ -69,195 +110,345 @@ public void tearDown() 
throws Exception { verify(storage); } + private void initializeObjectsWithServiceDependencies() { + fullBucket = + Bucket.builder(storage, "b") + .acl(ACL) + .etag(ETAG) + .id(ID) + .metageneration(META_GENERATION) + .owner(OWNER) + .selfLink(SELF_LINK) + .cors(CORS) + .createTime(CREATE_TIME) + .defaultAcl(DEFAULT_ACL) + .deleteRules(DELETE_RULES) + .indexPage(INDEX_PAGE) + .notFoundPage(NOT_FOUND_PAGE) + .location(LOCATION) + .storageClass(STORAGE_CLASS) + .versioningEnabled(VERSIONING_ENABLED) + .build(); + simpleBucket = Bucket.builder(storage, "b").metageneration(42L).build(); + blobResults = ImmutableList.of(Blob.builder(storage, "b", "n1").build(), + Blob.builder(storage, "b", "n2").build(), Blob.builder(storage, "b", "n3").build()); + } + + @Test + public void testToBuilder() { + replay(storage); + initializeObjectsWithServiceDependencies(); + compareBuckets(fullBucket, fullBucket.toBuilder().build()); + Bucket bucket = fullBucket.toBuilder().name("B").id("id").build(); + assertEquals("B", bucket.name()); + assertEquals("id", bucket.id()); + bucket = bucket.toBuilder().name("b").id(ID).build(); + compareBuckets(fullBucket, bucket); + } + @Test - public void testInfo() throws Exception { - assertEquals(BUCKET_INFO, bucket.info()); + public void testToBuilderIncomplete() { replay(storage); + initializeObjectsWithServiceDependencies(); + Bucket incompleteBucket = Bucket.builder(storage, "b").build(); + compareBuckets(incompleteBucket, incompleteBucket.toBuilder().build()); + } + + @Test + public void testOf() { + replay(storage); + initializeObjectsWithServiceDependencies(); + Bucket bucket = Bucket.of(storage, "bucket"); + assertEquals("bucket", bucket.name()); + } + + @Test + public void testBuilder() { + replay(storage); + initializeObjectsWithServiceDependencies(); + assertEquals("b", fullBucket.name()); + assertEquals(ACL, fullBucket.acl()); + assertEquals(ETAG, fullBucket.etag()); + assertEquals(ID, fullBucket.id()); + assertEquals(META_GENERATION, fullBucket.metageneration()); + assertEquals(OWNER, fullBucket.owner()); + assertEquals(SELF_LINK, fullBucket.selfLink()); + assertEquals(CREATE_TIME, fullBucket.createTime()); + assertEquals(CORS, fullBucket.cors()); + assertEquals(DEFAULT_ACL, fullBucket.defaultAcl()); + assertEquals(DELETE_RULES, fullBucket.deleteRules()); + assertEquals(INDEX_PAGE, fullBucket.indexPage()); + assertEquals(NOT_FOUND_PAGE, fullBucket.notFoundPage()); + assertEquals(LOCATION, fullBucket.location()); + assertEquals(STORAGE_CLASS, fullBucket.storageClass()); + assertEquals(VERSIONING_ENABLED, fullBucket.versioningEnabled()); + } + + @Test + public void testToPbAndFromPb() { + replay(storage); + initializeObjectsWithServiceDependencies(); + compareBuckets(fullBucket, Bucket.fromPb(storage, fullBucket.toPb())); + Bucket bucket = Bucket.of(storage, "b"); + compareBuckets(bucket, Bucket.fromPb(storage, bucket.toPb())); + } + + private void compareBuckets(Bucket expected, Bucket value) { + assertEquals(expected, value); + assertEquals(expected.name(), value.name()); + assertEquals(expected.acl(), value.acl()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.metageneration(), value.metageneration()); + assertEquals(expected.owner(), value.owner()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.createTime(), value.createTime()); + assertEquals(expected.cors(), value.cors()); + assertEquals(expected.defaultAcl(), value.defaultAcl()); + 
assertEquals(expected.deleteRules(), value.deleteRules()); + assertEquals(expected.indexPage(), value.indexPage()); + assertEquals(expected.notFoundPage(), value.notFoundPage()); + assertEquals(expected.location(), value.location()); + assertEquals(expected.storageClass(), value.storageClass()); + assertEquals(expected.versioningEnabled(), value.versioningEnabled()); + assertEquals(expected.storage().options(), value.storage().options()); + } + + @Test + public void testDeleteRules() { + replay(storage); + AgeDeleteRule ageRule = new AgeDeleteRule(10); + assertEquals(10, ageRule.daysToLive()); + assertEquals(Type.AGE, ageRule.type()); + CreatedBeforeDeleteRule createBeforeRule = new CreatedBeforeDeleteRule(1); + assertEquals(1, createBeforeRule.timeMillis()); + assertEquals(Type.CREATE_BEFORE, createBeforeRule.type()); + NumNewerVersionsDeleteRule versionsRule = new NumNewerVersionsDeleteRule(2); + assertEquals(2, versionsRule.numNewerVersions()); + assertEquals(Type.NUM_NEWER_VERSIONS, versionsRule.type()); + IsLiveDeleteRule isLiveRule = new IsLiveDeleteRule(true); + assertTrue(isLiveRule.isLive()); + assertEquals(Type.IS_LIVE, isLiveRule.type()); + Rule rule = new Rule().set("a", "b"); + RawDeleteRule rawRule = new RawDeleteRule(rule); + assertEquals(Type.UNKNOWN, rawRule.type()); + ImmutableList rules = + ImmutableList.of(ageRule, createBeforeRule, versionsRule, isLiveRule, rawRule); + for (DeleteRule delRule : rules) { + assertEquals(delRule, DeleteRule.fromPb(delRule.toPb())); + } } @Test public void testExists_True() throws Exception { Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()}; - expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(BUCKET_INFO); + expect(storage.get(expectedSimpleBucket.name(), expectedOptions)) + .andReturn(expectedSimpleBucket); replay(storage); - assertTrue(bucket.exists()); + initializeObjectsWithServiceDependencies(); + assertTrue(simpleBucket.exists()); } @Test public void testExists_False() throws Exception { Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()}; - expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(null); + expect(storage.get(expectedSimpleBucket.name(), expectedOptions)).andReturn(null); replay(storage); - assertFalse(bucket.exists()); + initializeObjectsWithServiceDependencies(); + assertFalse(simpleBucket.exists()); } @Test public void testReload() throws Exception { - BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build(); - expect(storage.get(updatedInfo.name())).andReturn(updatedInfo); + Bucket updated = expectedSimpleBucket.toBuilder().notFoundPage("p").build(); + expect(storage.get(updated.name())).andReturn(updated); replay(storage); - Bucket updatedBucket = bucket.reload(); - assertSame(storage, updatedBucket.storage()); - assertEquals(updatedInfo, updatedBucket.info()); + initializeObjectsWithServiceDependencies(); + Bucket updatedBucket = simpleBucket.reload(); + assertSame(simpleStorageMock, updatedBucket.storage()); + assertEquals(updated, updatedBucket); } @Test public void testReloadNull() throws Exception { - expect(storage.get(BUCKET_INFO.name())).andReturn(null); + expect(storage.get(expectedSimpleBucket.name())).andReturn(null); replay(storage); - assertNull(bucket.reload()); + initializeObjectsWithServiceDependencies(); + assertNull(simpleBucket.reload()); } @Test public void testReloadWithOptions() throws Exception { - BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build(); - 
expect(storage.get(updatedInfo.name(), Storage.BucketGetOption.metagenerationMatch(42L))) - .andReturn(updatedInfo); + Bucket updated = expectedSimpleBucket.toBuilder().notFoundPage("p").build(); + expect(storage.get(updated.name(), Storage.BucketGetOption.metagenerationMatch(42L))) + .andReturn(updated); replay(storage); - Bucket updatedBucket = bucket.reload(Bucket.BucketSourceOption.metagenerationMatch()); - assertSame(storage, updatedBucket.storage()); - assertEquals(updatedInfo, updatedBucket.info()); + initializeObjectsWithServiceDependencies(); + Bucket updatedBucket = simpleBucket.reload(Bucket.BucketSourceOption.metagenerationMatch()); + assertSame(simpleStorageMock, updatedBucket.storage()); + assertEquals(updated, updatedBucket); } @Test public void testUpdate() throws Exception { - BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build(); - expect(storage.update(updatedInfo)).andReturn(updatedInfo); + Bucket expected = expectedSimpleBucket.toBuilder().notFoundPage("p").build(); + expect(storage.update(expected)).andReturn(expected); replay(storage); - Bucket updatedBucket = bucket.update(updatedInfo); - assertSame(storage, bucket.storage()); - assertEquals(updatedInfo, updatedBucket.info()); + initializeObjectsWithServiceDependencies(); + Bucket updatedBucket = simpleBucket.update(expected); + assertSame(storage, simpleBucket.storage()); + assertEquals(expected, updatedBucket); } @Test public void testDelete() throws Exception { - expect(storage.delete(BUCKET_INFO.name())).andReturn(true); + expect(storage.delete(expectedSimpleBucket.name())).andReturn(true); replay(storage); - assertTrue(bucket.delete()); + initializeObjectsWithServiceDependencies(); + assertTrue(simpleBucket.delete()); } @Test public void testList() throws Exception { StorageOptions storageOptions = createStrictMock(StorageOptions.class); - PageImpl blobInfoPage = new PageImpl<>(null, "c", BLOB_INFO_RESULTS); - expect(storage.list(BUCKET_INFO.name())).andReturn(blobInfoPage); - expect(storage.options()).andReturn(storageOptions); - expect(storageOptions.service()).andReturn(storage); + PageImpl expectedPage = new PageImpl<>(null, "c", expectedBlobResults); + expect(storage.list(expectedSimpleBucket.name())).andReturn(expectedPage); replay(storage, storageOptions); - Page blobPage = bucket.list(); - Iterator blobInfoIterator = blobInfoPage.values().iterator(); + initializeObjectsWithServiceDependencies(); + Page blobPage = simpleBucket.list(); + Iterator expectedBlobIterator = expectedPage.values().iterator(); Iterator blobIterator = blobPage.values().iterator(); - while (blobInfoIterator.hasNext() && blobIterator.hasNext()) { - assertEquals(blobInfoIterator.next(), blobIterator.next().info()); + while (expectedBlobIterator.hasNext() && blobIterator.hasNext()) { + assertEquals(expectedBlobIterator.next(), blobIterator.next()); } - assertFalse(blobInfoIterator.hasNext()); + assertFalse(expectedBlobIterator.hasNext()); assertFalse(blobIterator.hasNext()); - assertEquals(blobInfoPage.nextPageCursor(), blobPage.nextPageCursor()); + assertEquals(expectedPage.nextPageCursor(), blobPage.nextPageCursor()); verify(storageOptions); } + @Test + public void testCreateBucket() throws Exception { + expect(storage.create(expectedSimpleBucket)).andReturn(expectedSimpleBucket); + replay(storage); + initializeObjectsWithServiceDependencies(); + Bucket bucket = simpleBucket.create(); + assertEquals(simpleBucket, bucket); + } + @Test public void testGet() throws Exception { - BlobInfo info = 
BlobInfo.builder("b", "n").build(); - expect(storage.get(BlobId.of(bucket.info().name(), "n"), new Storage.BlobGetOption[0])) - .andReturn(info); + Blob expected = Blob.builder(simpleStorageMock, "b", "n").build(); + expect(storage.get(BlobId.of(expectedSimpleBucket.name(), "n"), new Storage.BlobGetOption[0])) + .andReturn(expected); replay(storage); - Blob blob = bucket.get("n"); - assertEquals(info, blob.info()); + initializeObjectsWithServiceDependencies(); + Blob blob = simpleBucket.get("n"); + assertEquals(expected, blob); } @Test public void testGetAll() throws Exception { Capture capturedBatchRequest = Capture.newInstance(); - List> batchResultList = new LinkedList<>(); - for (BlobInfo info : BLOB_INFO_RESULTS) { - batchResultList.add(new Result<>(info)); + List> batchResultList = new LinkedList<>(); + for (Blob blob : expectedBlobResults) { + batchResultList.add(new Result<>(blob)); } BatchResponse response = new BatchResponse(Collections.>emptyList(), - Collections.>emptyList(), batchResultList); + Collections.>emptyList(), batchResultList); expect(storage.apply(capture(capturedBatchRequest))).andReturn(response); replay(storage); - List blobs = bucket.get("n1", "n2", "n3"); - Set blobInfoSet = capturedBatchRequest.getValue().toGet().keySet(); - assertEquals(batchResultList.size(), blobInfoSet.size()); - for (BlobInfo info : BLOB_INFO_RESULTS) { - assertTrue(blobInfoSet.contains(info.blobId())); + initializeObjectsWithServiceDependencies(); + List blobs = simpleBucket.get("n1", "n2", "n3"); + Set blobSet = capturedBatchRequest.getValue().toGet().keySet(); + assertEquals(batchResultList.size(), blobSet.size()); + for (Blob blob : blobResults) { + assertTrue(blobSet.contains(blob.blobId())); } Iterator blobIterator = blobs.iterator(); - Iterator> batchResultIterator = response.gets().iterator(); + Iterator> batchResultIterator = response.gets().iterator(); while (batchResultIterator.hasNext() && blobIterator.hasNext()) { - assertEquals(batchResultIterator.next().get(), blobIterator.next().info()); + assertEquals(batchResultIterator.next().get(), blobIterator.next()); } assertFalse(batchResultIterator.hasNext()); assertFalse(blobIterator.hasNext()); } @Test - public void testCreate() throws Exception { - BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build(); + public void testCreateBlob() throws Exception { + Blob expected = Blob.builder(simpleStorageMock, "b", "n").contentType(CONTENT_TYPE).build(); byte[] content = {0xD, 0xE, 0xA, 0xD}; - expect(storage.create(info, content)).andReturn(info); + expect(storage.create(expected, content)).andReturn(expected); replay(storage); - Blob blob = bucket.create("n", content, CONTENT_TYPE); - assertEquals(info, blob.info()); + initializeObjectsWithServiceDependencies(); + Blob blob = simpleBucket.create("n", content, CONTENT_TYPE); + assertEquals(expected, blob); } @Test public void testCreateNullContentType() throws Exception { - BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); + Blob expected = + Blob.builder(simpleStorageMock, "b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); byte[] content = {0xD, 0xE, 0xA, 0xD}; - expect(storage.create(info, content)).andReturn(info); + expect(storage.create(expected, content)).andReturn(expected); replay(storage); - Blob blob = bucket.create("n", content, null); - assertEquals(info, blob.info()); + initializeObjectsWithServiceDependencies(); + Blob blob = simpleBucket.create("n", content, null); + assertEquals(expected, blob); 
} @Test public void testCreateFromStream() throws Exception { - BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build(); + Blob expected = Blob.builder(simpleStorageMock, "b", "n").contentType(CONTENT_TYPE).build(); byte[] content = {0xD, 0xE, 0xA, 0xD}; InputStream streamContent = new ByteArrayInputStream(content); - expect(storage.create(info, streamContent)).andReturn(info); + expect(storage.create(expected, streamContent)).andReturn(expected); replay(storage); - Blob blob = bucket.create("n", streamContent, CONTENT_TYPE); - assertEquals(info, blob.info()); + initializeObjectsWithServiceDependencies(); + Blob blob = simpleBucket.create("n", streamContent, CONTENT_TYPE); + assertEquals(expected, blob); } @Test public void testCreateFromStreamNullContentType() throws Exception { - BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); + Blob expected = + Blob.builder(simpleStorageMock, "b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); byte[] content = {0xD, 0xE, 0xA, 0xD}; InputStream streamContent = new ByteArrayInputStream(content); - expect(storage.create(info, streamContent)).andReturn(info); + expect(storage.create(expected, streamContent)).andReturn(expected); replay(storage); - Blob blob = bucket.create("n", streamContent, null); - assertEquals(info, blob.info()); + initializeObjectsWithServiceDependencies(); + Blob blob = simpleBucket.create("n", streamContent, null); + assertEquals(expected, blob); } @Test public void testStaticGet() throws Exception { - expect(storage.get(BUCKET_INFO.name())).andReturn(BUCKET_INFO); + expect(storage.get(expectedSimpleBucket.name())).andReturn(expectedSimpleBucket); replay(storage); - Bucket loadedBucket = Bucket.get(storage, BUCKET_INFO.name()); + initializeObjectsWithServiceDependencies(); + Bucket loadedBucket = Bucket.get(storage, simpleBucket.name()); assertNotNull(loadedBucket); - assertEquals(BUCKET_INFO, loadedBucket.info()); + assertEquals(simpleBucket, loadedBucket); } @Test public void testStaticGetNull() throws Exception { - expect(storage.get(BUCKET_INFO.name())).andReturn(null); + expect(storage.get(expectedSimpleBucket.name())).andReturn(null); replay(storage); - assertNull(Bucket.get(storage, BUCKET_INFO.name())); + initializeObjectsWithServiceDependencies(); + assertNull(Bucket.get(storage, simpleBucket.name())); } @Test public void testStaticGetWithOptions() throws Exception { - expect(storage.get(BUCKET_INFO.name(), Storage.BucketGetOption.fields())) - .andReturn(BUCKET_INFO); + expect(storage.get(expectedSimpleBucket.name(), Storage.BucketGetOption.fields())) + .andReturn(expectedSimpleBucket); replay(storage); + initializeObjectsWithServiceDependencies(); Bucket loadedBucket = - Bucket.get(storage, BUCKET_INFO.name(), Storage.BucketGetOption.fields()); + Bucket.get(storage, simpleBucket.name(), Storage.BucketGetOption.fields()); assertNotNull(loadedBucket); - assertEquals(BUCKET_INFO, loadedBucket.info()); + assertEquals(simpleBucket, loadedBucket); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java index b7e8d14e53a1..7ec712bae357 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java @@ -23,6 +23,8 @@ import com.google.gcloud.storage.Storage.BlobSourceOption; import 
com.google.gcloud.storage.Storage.BlobTargetOption; +import org.easymock.EasyMock; +import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -36,8 +38,19 @@ public class CopyRequestTest { private static final String TARGET_BLOB_CONTENT_TYPE = "contentType"; private static final BlobId SOURCE_BLOB_ID = BlobId.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME); private static final BlobId TARGET_BLOB_ID = BlobId.of(TARGET_BUCKET_NAME, TARGET_BLOB_NAME); - private static final BlobInfo TARGET_BLOB_INFO = BlobInfo.builder(TARGET_BLOB_ID) - .contentType(TARGET_BLOB_CONTENT_TYPE).build(); + + + private Storage storage; + private Blob targetBlob; + + @Before + public void setUp() { + storage = EasyMock.createMock(Storage.class); + EasyMock.expect(storage.options()).andReturn(null).anyTimes(); + EasyMock.replay(storage); + targetBlob = + Blob.builder(storage, TARGET_BLOB_ID).contentType(TARGET_BLOB_CONTENT_TYPE).build(); + } @Rule public ExpectedException thrown = ExpectedException.none(); @@ -47,62 +60,66 @@ public void testCopyRequest() { Storage.CopyRequest copyRequest1 = Storage.CopyRequest.builder() .source(SOURCE_BLOB_ID) .sourceOptions(BlobSourceOption.generationMatch(1)) - .target(TARGET_BLOB_INFO, BlobTargetOption.predefinedAcl(PUBLIC_READ)) + .target(targetBlob, BlobTargetOption.predefinedAcl(PUBLIC_READ)) .build(); assertEquals(SOURCE_BLOB_ID, copyRequest1.source()); assertEquals(1, copyRequest1.sourceOptions().size()); assertEquals(BlobSourceOption.generationMatch(1), copyRequest1.sourceOptions().get(0)); - assertEquals(TARGET_BLOB_INFO, copyRequest1.target()); + assertEquals(targetBlob, copyRequest1.target()); assertEquals(1, copyRequest1.targetOptions().size()); assertEquals(BlobTargetOption.predefinedAcl(PUBLIC_READ), copyRequest1.targetOptions().get(0)); Storage.CopyRequest copyRequest2 = Storage.CopyRequest.builder() .source(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME) - .target(TARGET_BLOB_ID) + .target(storage, TARGET_BLOB_ID) .build(); assertEquals(SOURCE_BLOB_ID, copyRequest2.source()); - assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest2.target()); + assertEquals(Blob.builder(storage, TARGET_BLOB_ID).build(), copyRequest2.target()); Storage.CopyRequest copyRequest3 = Storage.CopyRequest.builder() .source(SOURCE_BLOB_ID) - .target(TARGET_BLOB_INFO, ImmutableList.of(BlobTargetOption.predefinedAcl(PUBLIC_READ))) + .target(targetBlob, ImmutableList.of(BlobTargetOption.predefinedAcl(PUBLIC_READ))) .build(); assertEquals(SOURCE_BLOB_ID, copyRequest3.source()); - assertEquals(TARGET_BLOB_INFO, copyRequest3.target()); + assertEquals(targetBlob, copyRequest3.target()); assertEquals(ImmutableList.of(BlobTargetOption.predefinedAcl(PUBLIC_READ)), copyRequest3.targetOptions()); } @Test public void testCopyRequestOf() { - Storage.CopyRequest copyRequest1 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_INFO); + Storage.CopyRequest copyRequest1 = Storage.CopyRequest.of(SOURCE_BLOB_ID, targetBlob); assertEquals(SOURCE_BLOB_ID, copyRequest1.source()); - assertEquals(TARGET_BLOB_INFO, copyRequest1.target()); + assertEquals(targetBlob, copyRequest1.target()); - Storage.CopyRequest copyRequest2 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_NAME); + Storage.CopyRequest copyRequest2 = + Storage.CopyRequest.of(storage, SOURCE_BLOB_ID, TARGET_BLOB_NAME); assertEquals(SOURCE_BLOB_ID, copyRequest2.source()); - assertEquals(BlobInfo.builder(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), + assertEquals( + Blob.builder(storage, 
SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), copyRequest2.target()); Storage.CopyRequest copyRequest3 = - Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_INFO); + Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, targetBlob); assertEquals(SOURCE_BLOB_ID, copyRequest3.source()); - assertEquals(TARGET_BLOB_INFO, copyRequest3.target()); + assertEquals(targetBlob, copyRequest3.target()); Storage.CopyRequest copyRequest4 = - Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_NAME); + Storage.CopyRequest.of(storage, SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_NAME); assertEquals(SOURCE_BLOB_ID, copyRequest4.source()); - assertEquals(BlobInfo.builder(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), + assertEquals( + Blob.builder(storage, SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), copyRequest4.target()); - Storage.CopyRequest copyRequest5 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_ID); + Storage.CopyRequest copyRequest5 = + Storage.CopyRequest.of(storage, SOURCE_BLOB_ID, TARGET_BLOB_ID); assertEquals(SOURCE_BLOB_ID, copyRequest5.source()); - assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest5.target()); + assertEquals(Blob.builder(storage, TARGET_BLOB_ID).build(), copyRequest5.target()); Storage.CopyRequest copyRequest6 = - Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_ID); + Storage.CopyRequest.of(storage, SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_ID); assertEquals(SOURCE_BLOB_ID, copyRequest6.source()); - assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest6.target()); + assertEquals(Blob.builder(storage, TARGET_BLOB_ID).build(), copyRequest6.target()); } @Test @@ -110,20 +127,20 @@ public void testCopyRequestFail() { thrown.expect(IllegalArgumentException.class); Storage.CopyRequest.builder() .source(SOURCE_BLOB_ID) - .target(BlobInfo.builder(TARGET_BLOB_ID).build()) + .target(Blob.builder(storage, TARGET_BLOB_ID).build()) .build(); } @Test - public void testCopyRequestOfBlobInfoFail() { + public void testCopyRequestOfBlobFail() { thrown.expect(IllegalArgumentException.class); - Storage.CopyRequest.of(SOURCE_BLOB_ID, BlobInfo.builder(TARGET_BLOB_ID).build()); + Storage.CopyRequest.of(SOURCE_BLOB_ID, Blob.builder(storage, TARGET_BLOB_ID).build()); } @Test public void testCopyRequestOfStringFail() { thrown.expect(IllegalArgumentException.class); Storage.CopyRequest.of( - SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, BlobInfo.builder(TARGET_BLOB_ID).build()); + SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, Blob.builder(storage, TARGET_BLOB_ID).build()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java index 1b1ffd987de6..a6d814cd7f43 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java @@ -46,22 +46,17 @@ public class CopyWriterTest { private static final String DESTINATION_BUCKET_NAME = "b1"; private static final String DESTINATION_BLOB_NAME = "n1"; private static final BlobId BLOB_ID = BlobId.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME); - private static final BlobInfo BLOB_INFO = - BlobInfo.builder(DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).build(); - private static final BlobInfo RESULT = - BlobInfo.builder(DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).contentType("type").build(); private static final Map 
EMPTY_OPTIONS = ImmutableMap.of(); - private static final RewriteRequest REQUEST = new StorageRpc.RewriteRequest(BLOB_ID.toPb(), - EMPTY_OPTIONS, BLOB_INFO.toPb(), EMPTY_OPTIONS, null); - private static final RewriteResponse RESPONSE = new StorageRpc.RewriteResponse(REQUEST, - null, 42L, false, "token", 21L); - private static final RewriteResponse RESPONSE_DONE = new StorageRpc.RewriteResponse(REQUEST, - RESULT.toPb(), 42L, true, "token", 42L); private StorageOptions options; private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; private CopyWriter copyWriter; + private Blob blob; + private Blob result; + private RewriteRequest request; + private RewriteResponse response; + private RewriteResponse responseDone; @Before public void setUp() { @@ -75,6 +70,15 @@ public void setUp() { .serviceRpcFactory(rpcFactoryMock) .retryParams(RetryParams.noRetries()) .build(); + blob = Blob.builder(options.service(), DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).build(); + result = + Blob.builder(options.service(), DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME) + .contentType("type") + .build(); + request = new StorageRpc.RewriteRequest( + BLOB_ID.toPb(), EMPTY_OPTIONS, blob.toPb(), EMPTY_OPTIONS, null); + response = new StorageRpc.RewriteResponse(request, null, 42L, false, "token", 21L); + responseDone = new StorageRpc.RewriteResponse(request, result.toPb(), 42L, true, "token", 42L); } @After @@ -84,10 +88,10 @@ public void tearDown() throws Exception { @Test public void testRewrite() { - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + EasyMock.expect(storageRpcMock.continueRewrite(response)).andReturn(responseDone); EasyMock.replay(storageRpcMock); - copyWriter = new CopyWriter(options, RESPONSE); - assertEquals(RESULT, copyWriter.result()); + copyWriter = new CopyWriter(options, response); + assertEquals(result, copyWriter.result()); assertTrue(copyWriter.isDone()); assertEquals(42L, copyWriter.totalBytesCopied()); assertEquals(42L, copyWriter.blobSize()); @@ -95,11 +99,11 @@ public void testRewrite() { @Test public void testRewriteMultipleRequests() { - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE); - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + EasyMock.expect(storageRpcMock.continueRewrite(response)).andReturn(response); + EasyMock.expect(storageRpcMock.continueRewrite(response)).andReturn(responseDone); EasyMock.replay(storageRpcMock); - copyWriter = new CopyWriter(options, RESPONSE); - assertEquals(RESULT, copyWriter.result()); + copyWriter = new CopyWriter(options, response); + assertEquals(result, copyWriter.result()); assertTrue(copyWriter.isDone()); assertEquals(42L, copyWriter.totalBytesCopied()); assertEquals(42L, copyWriter.blobSize()); @@ -107,17 +111,17 @@ public void testRewriteMultipleRequests() { @Test public void testSaveAndRestore() { - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE); - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + EasyMock.expect(storageRpcMock.continueRewrite(response)).andReturn(response); + EasyMock.expect(storageRpcMock.continueRewrite(response)).andReturn(responseDone); EasyMock.replay(storageRpcMock); - copyWriter = new CopyWriter(options, RESPONSE); + copyWriter = new CopyWriter(options, response); copyWriter.copyChunk(); assertTrue(!copyWriter.isDone()); assertEquals(21L, copyWriter.totalBytesCopied()); assertEquals(42L, copyWriter.blobSize()); 
RestorableState rewriterState = copyWriter.capture(); CopyWriter restoredRewriter = rewriterState.restore(); - assertEquals(RESULT, restoredRewriter.result()); + assertEquals(result, restoredRewriter.result()); assertTrue(restoredRewriter.isDone()); assertEquals(42L, restoredRewriter.totalBytesCopied()); assertEquals(42L, restoredRewriter.blobSize()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java index 614ceee7b61e..3b4da8334ccb 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java @@ -72,7 +72,7 @@ public class ITStorageTest { public static void beforeClass() { RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); storage = gcsHelper.options().service(); - storage.create(BucketInfo.of(BUCKET)); + Bucket.of(storage, BUCKET).create(); } @AfterClass @@ -86,7 +86,8 @@ public static void afterClass() throws ExecutionException, InterruptedException @Test(timeout = 5000) public void testListBuckets() throws InterruptedException { - Iterator bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET), + Iterator bucketIterator = + storage.list(Storage.BucketListOption.prefix(BUCKET), Storage.BucketListOption.fields()).values().iterator(); while (!bucketIterator.hasNext()) { Thread.sleep(500); @@ -94,7 +95,7 @@ public void testListBuckets() throws InterruptedException { Storage.BucketListOption.fields()).values().iterator(); } while (bucketIterator.hasNext()) { - BucketInfo remoteBucket = bucketIterator.next(); + Bucket remoteBucket = bucketIterator.next(); assertTrue(remoteBucket.name().startsWith(BUCKET)); assertNull(remoteBucket.createTime()); assertNull(remoteBucket.selfLink()); @@ -103,7 +104,7 @@ public void testListBuckets() throws InterruptedException { @Test public void testGetBucketSelectedFields() { - BucketInfo remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields(BucketField.ID)); + Bucket remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields(BucketField.ID)); assertEquals(BUCKET, remoteBucket.name()); assertNull(remoteBucket.createTime()); assertNotNull(remoteBucket.id()); @@ -111,7 +112,7 @@ public void testGetBucketSelectedFields() { @Test public void testGetBucketAllSelectedFields() { - BucketInfo remoteBucket = storage.get(BUCKET, + Bucket remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields(BucketField.values())); assertEquals(BUCKET, remoteBucket.name()); assertNotNull(remoteBucket.createTime()); @@ -120,7 +121,7 @@ public void testGetBucketAllSelectedFields() { @Test public void testGetBucketEmptyFields() { - BucketInfo remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields()); + Bucket remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields()); assertEquals(BUCKET, remoteBucket.name()); assertNull(remoteBucket.createTime()); assertNull(remoteBucket.selfLink()); @@ -129,8 +130,8 @@ public void testGetBucketEmptyFields() { @Test public void testCreateBlob() { String blobName = "test-create-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); - BlobInfo remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); + Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); @@ 
-142,8 +143,8 @@ public void testCreateBlob() { @Test public void testCreateEmptyBlob() { String blobName = "test-create-empty-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); - BlobInfo remoteBlob = storage.create(blob); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); + Blob remoteBlob = storage.create(blob); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); @@ -155,9 +156,9 @@ public void testCreateEmptyBlob() { @Test public void testCreateBlobStream() { String blobName = "test-create-blob-stream"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).contentType(CONTENT_TYPE).build(); ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8)); - BlobInfo remoteBlob = storage.create(blob, stream); + Blob remoteBlob = storage.create(blob, stream); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); @@ -170,9 +171,9 @@ public void testCreateBlobStream() { @Test public void testCreateBlobFail() { String blobName = "test-create-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L).build(); + Blob wrongGenerationBlob = Blob.builder(storage, BUCKET, blobName, -1L).build(); try { storage.create(wrongGenerationBlob, BLOB_BYTE_CONTENT, Storage.BlobTargetOption.generationMatch()); @@ -186,7 +187,8 @@ public void testCreateBlobFail() { @Test public void testCreateBlobMd5Fail() { String blobName = "test-create-blob-md5-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + Blob blob = + Blob.builder(storage, BUCKET, blobName) .contentType(CONTENT_TYPE) .md5("O1R4G1HJSDUISJjoIYmVhQ==") .build(); @@ -202,9 +204,9 @@ public void testCreateBlobMd5Fail() { @Test public void testGetBlobEmptySelectedFields() { String blobName = "test-get-empty-selected-fields-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).contentType(CONTENT_TYPE).build(); assertNotNull(storage.create(blob)); - BlobInfo remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields()); + Blob remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields()); assertEquals(blob.blobId(), remoteBlob.blobId()); assertNull(remoteBlob.contentType()); assertTrue(storage.delete(BUCKET, blobName)); @@ -213,12 +215,13 @@ public void testGetBlobEmptySelectedFields() { @Test public void testGetBlobSelectedFields() { String blobName = "test-get-selected-fields-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + Blob blob = + Blob.builder(storage, BUCKET, blobName) .contentType(CONTENT_TYPE) .metadata(ImmutableMap.of("k", "v")) .build(); assertNotNull(storage.create(blob)); - BlobInfo remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields( + Blob remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields( BlobField.METADATA)); assertEquals(blob.blobId(), remoteBlob.blobId()); assertEquals(ImmutableMap.of("k", "v"), remoteBlob.metadata()); @@ -229,12 +232,13 @@ public void testGetBlobSelectedFields() { @Test public void testGetBlobAllSelectedFields() { String blobName = "test-get-all-selected-fields-blob"; - BlobInfo 
blob = BlobInfo.builder(BUCKET, blobName) + Blob blob = + Blob.builder(storage, BUCKET, blobName) .contentType(CONTENT_TYPE) .metadata(ImmutableMap.of("k", "v")) .build(); assertNotNull(storage.create(blob)); - BlobInfo remoteBlob = storage.get(blob.blobId(), + Blob remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields(BlobField.values())); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); @@ -247,7 +251,7 @@ public void testGetBlobAllSelectedFields() { @Test public void testGetBlobFail() { String blobName = "test-get-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName); try { @@ -262,7 +266,7 @@ public void testGetBlobFail() { @Test public void testGetBlobFailNonExistingGeneration() { String blobName = "test-get-blob-fail-non-existing-generation"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName, -1L); assertNull(storage.get(wrongGenerationBlob)); @@ -274,21 +278,24 @@ public void testListBlobsSelectedFields() { String[] blobNames = {"test-list-blobs-selected-fields-blob1", "test-list-blobs-selected-fields-blob2"}; ImmutableMap metadata = ImmutableMap.of("k", "v"); - BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0]) + Blob blob1 = + Blob.builder(storage, BUCKET, blobNames[0]) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); - BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1]) + Blob blob2 = + Blob.builder(storage, BUCKET, blobNames[1]) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); assertNotNull(storage.create(blob1)); assertNotNull(storage.create(blob2)); - Page page = storage.list(BUCKET, + Page page = + storage.list(BUCKET, Storage.BlobListOption.prefix("test-list-blobs-selected-fields-blob"), Storage.BlobListOption.fields(BlobField.METADATA)); int index = 0; - for (BlobInfo remoteBlob : page.values()) { + for (Blob remoteBlob : page.values()) { assertEquals(BUCKET, remoteBlob.bucket()); assertEquals(blobNames[index++], remoteBlob.name()); assertEquals(metadata, remoteBlob.metadata()); @@ -302,19 +309,20 @@ public void testListBlobsSelectedFields() { public void testListBlobsEmptySelectedFields() { String[] blobNames = {"test-list-blobs-empty-selected-fields-blob1", "test-list-blobs-empty-selected-fields-blob2"}; - BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0]) + Blob blob1 = Blob.builder(storage, BUCKET, blobNames[0]) .contentType(CONTENT_TYPE) .build(); - BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1]) + Blob blob2 = Blob.builder(storage, BUCKET, blobNames[1]) .contentType(CONTENT_TYPE) .build(); assertNotNull(storage.create(blob1)); assertNotNull(storage.create(blob2)); - Page page = storage.list(BUCKET, + Page page = storage.list( + BUCKET, Storage.BlobListOption.prefix("test-list-blobs-empty-selected-fields-blob"), Storage.BlobListOption.fields()); int index = 0; - for (BlobInfo remoteBlob : page.values()) { + for (Blob remoteBlob : page.values()) { assertEquals(BUCKET, remoteBlob.bucket()); assertEquals(blobNames[index++], remoteBlob.name()); assertNull(remoteBlob.contentType()); @@ -326,9 +334,9 @@ public void testListBlobsEmptySelectedFields() { @Test public void testUpdateBlob() { String blobName = "test-update-blob"; - BlobInfo 
blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = storage.update(blob.toBuilder().contentType(CONTENT_TYPE).build()); + Blob updatedBlob = storage.update(blob.toBuilder().contentType(CONTENT_TYPE).build()); assertNotNull(updatedBlob); assertEquals(blob.name(), updatedBlob.name()); assertEquals(blob.bucket(), updatedBlob.bucket()); @@ -341,12 +349,13 @@ public void testUpdateBlobReplaceMetadata() { String blobName = "test-update-blob-replace-metadata"; ImmutableMap metadata = ImmutableMap.of("k1", "a"); ImmutableMap newMetadata = ImmutableMap.of("k2", "b"); - BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + Blob blob = + Blob.builder(storage, BUCKET, blobName) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(null).build()); + Blob updatedBlob = storage.update(blob.toBuilder().metadata(null).build()); assertNotNull(updatedBlob); assertNull(updatedBlob.metadata()); updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); @@ -362,12 +371,13 @@ public void testUpdateBlobMergeMetadata() { ImmutableMap metadata = ImmutableMap.of("k1", "a"); ImmutableMap newMetadata = ImmutableMap.of("k2", "b"); ImmutableMap expectedMetadata = ImmutableMap.of("k1", "a", "k2", "b"); - BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + Blob blob = + Blob.builder(storage, BUCKET, blobName) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); + Blob updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); assertNotNull(updatedBlob); assertEquals(blob.name(), updatedBlob.name()); assertEquals(blob.bucket(), updatedBlob.bucket()); @@ -383,12 +393,13 @@ public void testUpdateBlobUnsetMetadata() { newMetadata.put("k1", "a"); newMetadata.put("k2", null); ImmutableMap expectedMetadata = ImmutableMap.of("k1", "a"); - BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + Blob blob = + Blob.builder(storage, BUCKET, blobName) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); + Blob updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); assertNotNull(updatedBlob); assertEquals(blob.name(), updatedBlob.name()); assertEquals(blob.bucket(), updatedBlob.bucket()); @@ -399,9 +410,10 @@ public void testUpdateBlobUnsetMetadata() { @Test public void testUpdateBlobFail() { String blobName = "test-update-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L) + Blob wrongGenerationBlob = + Blob.builder(storage, BUCKET, blobName, -1L) .contentType(CONTENT_TYPE) .build(); try { @@ -422,7 +434,7 @@ public void testDeleteNonExistingBlob() { @Test public void testDeleteBlobNonExistingGeneration() { String blobName = "test-delete-blob-non-existing-generation"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); assertTrue(!storage.delete(BlobId.of(BUCKET, blobName, -1L))); } @@ -430,7 
+442,7 @@ public void testDeleteBlobNonExistingGeneration() { @Test public void testDeleteBlobFail() { String blobName = "test-delete-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); try { storage.delete(BUCKET, blob.name(), Storage.BlobSourceOption.generationMatch(-1L)); @@ -445,15 +457,15 @@ public void testDeleteBlobFail() { public void testComposeBlob() { String sourceBlobName1 = "test-compose-blob-source-1"; String sourceBlobName2 = "test-compose-blob-source-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1, BLOB_BYTE_CONTENT)); assertNotNull(storage.create(sourceBlob2, BLOB_BYTE_CONTENT)); String targetBlobName = "test-compose-blob-target"; - BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build(); + Blob targetBlob = Blob.builder(storage, BUCKET, targetBlobName).build(); Storage.ComposeRequest req = Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob); - BlobInfo remoteBlob = storage.compose(req); + Blob remoteBlob = storage.compose(req); assertNotNull(remoteBlob); assertEquals(targetBlob.name(), remoteBlob.name()); assertEquals(targetBlob.bucket(), remoteBlob.bucket()); @@ -471,12 +483,12 @@ public void testComposeBlob() { public void testComposeBlobFail() { String sourceBlobName1 = "test-compose-blob-fail-source-1"; String sourceBlobName2 = "test-compose-blob-fail-source-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); String targetBlobName = "test-compose-blob-fail-target"; - BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build(); + Blob targetBlob = Blob.builder(storage, BUCKET, targetBlobName).build(); Storage.ComposeRequest req = Storage.ComposeRequest.builder() .addSource(sourceBlobName1, -1L) .addSource(sourceBlobName2, -1L) @@ -497,13 +509,14 @@ public void testCopyBlob() { String sourceBlobName = "test-copy-blob-source"; BlobId source = BlobId.of(BUCKET, sourceBlobName); ImmutableMap metadata = ImmutableMap.of("k", "v"); - BlobInfo blob = BlobInfo.builder(source) + Blob blob = Blob.builder(storage, source) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT)); String targetBlobName = "test-copy-blob-target"; - Storage.CopyRequest req = Storage.CopyRequest.of(source, BlobId.of(BUCKET, targetBlobName)); + Storage.CopyRequest req = + Storage.CopyRequest.of(storage, source, BlobId.of(BUCKET, targetBlobName)); CopyWriter copyWriter = storage.copy(req); assertEquals(BUCKET, copyWriter.result().bucket()); assertEquals(targetBlobName, copyWriter.result().name()); @@ -518,10 +531,11 @@ public void testCopyBlob() { public void testCopyBlobUpdateMetadata() { String sourceBlobName = "test-copy-blob-update-metadata-source"; BlobId source = BlobId.of(BUCKET, sourceBlobName); - 
assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT)); + assertNotNull(storage.create(Blob.builder(storage, source).build(), BLOB_BYTE_CONTENT)); String targetBlobName = "test-copy-blob-update-metadata-target"; ImmutableMap metadata = ImmutableMap.of("k", "v"); - BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName) + Blob target = + Blob.builder(storage, BUCKET, targetBlobName) .contentType(CONTENT_TYPE) .metadata(metadata) .build(); @@ -540,9 +554,9 @@ public void testCopyBlobUpdateMetadata() { public void testCopyBlobFail() { String sourceBlobName = "test-copy-blob-source-fail"; BlobId source = BlobId.of(BUCKET, sourceBlobName, -1L); - assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT)); + assertNotNull(storage.create(Blob.builder(storage, source).build(), BLOB_BYTE_CONTENT)); String targetBlobName = "test-copy-blob-target-fail"; - BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName).contentType(CONTENT_TYPE).build(); + Blob target = Blob.builder(storage, BUCKET, targetBlobName).contentType(CONTENT_TYPE).build(); Storage.CopyRequest req = Storage.CopyRequest.builder() .source(BUCKET, sourceBlobName) .sourceOptions(Storage.BlobSourceOption.generationMatch(-1L)) @@ -572,14 +586,14 @@ public void testCopyBlobFail() { public void testBatchRequest() { String sourceBlobName1 = "test-batch-request-blob-1"; String sourceBlobName2 = "test-batch-request-blob-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); // Batch update request - BlobInfo updatedBlob1 = sourceBlob1.toBuilder().contentType(CONTENT_TYPE).build(); - BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build(); + Blob updatedBlob1 = sourceBlob1.toBuilder().contentType(CONTENT_TYPE).build(); + Blob updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build(); BatchRequest updateRequest = BatchRequest.builder() .update(updatedBlob1) .update(updatedBlob2) @@ -588,8 +602,8 @@ public void testBatchRequest() { assertEquals(2, updateResponse.updates().size()); assertEquals(0, updateResponse.deletes().size()); assertEquals(0, updateResponse.gets().size()); - BlobInfo remoteUpdatedBlob1 = updateResponse.updates().get(0).get(); - BlobInfo remoteUpdatedBlob2 = updateResponse.updates().get(1).get(); + Blob remoteUpdatedBlob1 = updateResponse.updates().get(0).get(); + Blob remoteUpdatedBlob2 = updateResponse.updates().get(1).get(); assertEquals(sourceBlob1.bucket(), remoteUpdatedBlob1.bucket()); assertEquals(sourceBlob1.name(), remoteUpdatedBlob1.name()); assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket()); @@ -606,8 +620,8 @@ public void testBatchRequest() { assertEquals(2, getResponse.gets().size()); assertEquals(0, getResponse.deletes().size()); assertEquals(0, getResponse.updates().size()); - BlobInfo remoteBlob1 = getResponse.gets().get(0).get(); - BlobInfo remoteBlob2 = getResponse.gets().get(1).get(); + Blob remoteBlob1 = getResponse.gets().get(0).get(); + Blob remoteBlob2 = getResponse.gets().get(1).get(); assertEquals(remoteUpdatedBlob1, remoteBlob1); assertEquals(remoteUpdatedBlob2, remoteBlob2); @@ -636,11 +650,11 @@ public void testBatchRequestManyDeletes() { } String 
sourceBlobName1 = "test-batch-request-many-deletes-source-blob-1"; String sourceBlobName2 = "test-batch-request-many-deletes-source-blob-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); - BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build(); + Blob updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build(); BatchRequest updateRequest = builder .get(BUCKET, sourceBlobName1) @@ -658,13 +672,13 @@ public void testBatchRequestManyDeletes() { } // Check updates - BlobInfo remoteUpdatedBlob2 = response.updates().get(0).get(); + Blob remoteUpdatedBlob2 = response.updates().get(0).get(); assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket()); assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name()); assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType()); // Check gets - BlobInfo remoteBlob1 = response.gets().get(0).get(); + Blob remoteBlob1 = response.gets().get(0).get(); assertEquals(sourceBlob1.bucket(), remoteBlob1.bucket()); assertEquals(sourceBlob1.name(), remoteBlob1.name()); @@ -675,9 +689,9 @@ public void testBatchRequestManyDeletes() { @Test public void testBatchRequestFail() { String blobName = "test-batch-request-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = BlobInfo.builder(BUCKET, blobName, -1L).build(); + Blob updatedBlob = Blob.builder(storage, BUCKET, blobName, -1L).build(); BatchRequest batchRequest = BatchRequest.builder() .update(updatedBlob, Storage.BlobTargetOption.generationMatch()) .delete(BUCKET, blobName, Storage.BlobSourceOption.generationMatch(-1L)) @@ -702,7 +716,7 @@ public void testBatchRequestFail() { @Test public void testReadAndWriteChannels() throws IOException { String blobName = "test-read-and-write-channels-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); byte[] stringBytes; try (WriteChannel writer = storage.writer(blob)) { stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); @@ -725,7 +739,7 @@ public void testReadAndWriteChannels() throws IOException { @Test public void testReadAndWriteCaptureChannels() throws IOException { String blobName = "test-read-and-write-capture-channels-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); byte[] stringBytes; WriteChannel writer = storage.writer(blob); stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); @@ -754,7 +768,7 @@ public void testReadAndWriteCaptureChannels() throws IOException { @Test public void testReadChannelFail() throws IOException { String blobName = "test-read-channel-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); try (ReadChannel reader = storage.reader(blob.blobId(), Storage.BlobSourceOption.metagenerationMatch(-1L))) { @@ -784,13 +798,13 @@ public void testReadChannelFail() throws IOException { @Test public void testReadChannelFailUpdatedGeneration() 
throws IOException { String blobName = "test-read-blob-fail-updated-generation"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); Random random = new Random(); int chunkSize = 1024; int blobSize = 2 * chunkSize; byte[] content = new byte[blobSize]; random.nextBytes(content); - BlobInfo remoteBlob = storage.create(blob, content); + Blob remoteBlob = storage.create(blob, content); assertNotNull(remoteBlob); assertEquals(blobSize, (long) remoteBlob.size()); try (ReadChannel reader = storage.reader(blob.blobId())) { @@ -819,7 +833,7 @@ public void testReadChannelFailUpdatedGeneration() throws IOException { @Test public void testWriteChannelFail() throws IOException { String blobName = "test-write-channel-blob-fail"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName, -1L).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName, -1L).build(); try { try (WriteChannel writer = storage.writer(blob, Storage.BlobWriteOption.generationMatch())) { writer.write(ByteBuffer.allocate(42)); @@ -833,8 +847,8 @@ public void testWriteChannelFail() throws IOException { @Test public void testWriteChannelExistingBlob() throws IOException { String blobName = "test-write-channel-existing-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); - BlobInfo remoteBlob = storage.create(blob); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); + Blob remoteBlob = storage.create(blob); byte[] stringBytes; try (WriteChannel writer = storage.writer(remoteBlob)) { stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); @@ -847,7 +861,7 @@ public void testWriteChannelExistingBlob() throws IOException { @Test public void testGetSignedUrl() throws IOException { String blobName = "test-get-signed-url-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT)); URL url = storage.signUrl(blob, 1, TimeUnit.HOURS); URLConnection connection = url.openConnection(); @@ -862,14 +876,14 @@ public void testGetSignedUrl() throws IOException { @Test public void testPostSignedUrl() throws IOException { String blobName = "test-post-signed-url-blob"; - BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Blob blob = Blob.builder(storage, BUCKET, blobName).build(); assertNotNull(storage.create(blob)); URL url = storage.signUrl(blob, 1, TimeUnit.HOURS, Storage.SignUrlOption.httpMethod(HttpMethod.POST)); URLConnection connection = url.openConnection(); connection.setDoOutput(true); connection.connect(); - BlobInfo remoteBlob = storage.get(BUCKET, blobName); + Blob remoteBlob = storage.get(BUCKET, blobName); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); @@ -880,11 +894,11 @@ public void testPostSignedUrl() throws IOException { public void testGetBlobs() { String sourceBlobName1 = "test-get-blobs-1"; String sourceBlobName2 = "test-get-blobs-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); - List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId()); + List remoteBlobs = 
storage.get(sourceBlob1.blobId(), sourceBlob2.blobId()); assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket()); assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name()); assertEquals(sourceBlob2.bucket(), remoteBlobs.get(1).bucket()); @@ -897,10 +911,10 @@ public void testGetBlobs() { public void testGetBlobsFail() { String sourceBlobName1 = "test-get-blobs-fail-1"; String sourceBlobName2 = "test-get-blobs-fail-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); - List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId()); + List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId()); assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket()); assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name()); assertNull(remoteBlobs.get(1)); @@ -911,8 +925,8 @@ public void testGetBlobsFail() { public void testDeleteBlobs() { String sourceBlobName1 = "test-delete-blobs-1"; String sourceBlobName2 = "test-delete-blobs-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); List deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId()); @@ -924,8 +938,8 @@ public void testDeleteBlobs() { public void testDeleteBlobsFail() { String sourceBlobName1 = "test-delete-blobs-fail-1"; String sourceBlobName2 = "test-delete-blobs-fail-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); List deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId()); assertTrue(deleteStatus.get(0)); @@ -936,13 +950,13 @@ public void testDeleteBlobsFail() { public void testUpdateBlobs() { String sourceBlobName1 = "test-update-blobs-1"; String sourceBlobName2 = "test-update-blobs-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); - BlobInfo remoteBlob1 = storage.create(sourceBlob1); - BlobInfo remoteBlob2 = storage.create(sourceBlob2); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); + Blob remoteBlob1 = storage.create(sourceBlob1); + Blob remoteBlob2 = storage.create(sourceBlob2); assertNotNull(remoteBlob1); assertNotNull(remoteBlob2); - List updatedBlobs = storage.update( + List updatedBlobs = storage.update( remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(), remoteBlob2.toBuilder().contentType(CONTENT_TYPE).build()); assertEquals(sourceBlob1.bucket(), updatedBlobs.get(0).bucket()); @@ -959,11 +973,11 @@ public void testUpdateBlobs() { public void testUpdateBlobsFail() { String sourceBlobName1 = 
"test-update-blobs-fail-1"; String sourceBlobName2 = "test-update-blobs-fail-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); - BlobInfo remoteBlob1 = storage.create(sourceBlob1); + Blob sourceBlob1 = Blob.builder(storage, BUCKET, sourceBlobName1).build(); + Blob sourceBlob2 = Blob.builder(storage, BUCKET, sourceBlobName2).build(); + Blob remoteBlob1 = storage.create(sourceBlob1); assertNotNull(remoteBlob1); - List updatedBlobs = storage.update( + List updatedBlobs = storage.update( remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(), sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build()); assertEquals(sourceBlob1.bucket(), updatedBlobs.get(0).bucket()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java index 05b7f5f6fd8c..b7d9896d2261 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java @@ -24,6 +24,7 @@ import com.google.gcloud.storage.testing.RemoteGcsHelper; import org.easymock.EasyMock; +import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -66,43 +67,53 @@ public class RemoteGcsHelperTest { + " \"type\": \"service_account\"\n" + "}"; private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); - private static final List BLOB_LIST = ImmutableList.of( - BlobInfo.builder(BUCKET_NAME, "n1").build(), - BlobInfo.builder(BUCKET_NAME, "n2").build()); private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, "", true); private static final StorageException FATAL_EXCEPTION = new StorageException(500, "", false); - private static final Page BLOB_PAGE = new Page() { - - @Override - public String nextPageCursor() { - return "nextPageCursor"; - } - - @Override - public Page nextPage() { - return null; - } - - @Override - public Iterable values() { - return BLOB_LIST; - } - - @Override - public Iterator iterateAll() { - return BLOB_LIST.iterator(); - } - }; + private Page blobPage; + private static Storage storageMock; + private static Storage simpleStorageMock; + private static List blobList; @Rule public ExpectedException thrown = ExpectedException.none(); + @Before + public void before() { + storageMock = EasyMock.createMock(Storage.class); + simpleStorageMock = EasyMock.createMock(Storage.class); + EasyMock.expect(simpleStorageMock.options()).andReturn(null).anyTimes(); + EasyMock.replay(simpleStorageMock); + blobList = ImmutableList.of( + Blob.builder(simpleStorageMock, BUCKET_NAME, "n1").build(), + Blob.builder(simpleStorageMock, BUCKET_NAME, "n2").build()); + blobPage = new Page() { + @Override + public String nextPageCursor() { + return "nextPageCursor"; + } + + @Override + public Page nextPage() { + return null; + } + + @Override + public Iterable values() { + return blobList; + } + + @Override + public Iterator iterateAll() { + return blobList.iterator(); + } + }; + } + @Test public void testForceDelete() throws InterruptedException, ExecutionException { - Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); - for (BlobInfo info : BLOB_LIST) { - EasyMock.expect(storageMock.delete(BUCKET_NAME, 
info.name())).andReturn(true); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage); + for (Blob blob : blobList) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, blob.name())).andReturn(true); } EasyMock.expect(storageMock.delete(BUCKET_NAME)).andReturn(true); EasyMock.replay(storageMock); @@ -112,10 +123,9 @@ public void testForceDelete() throws InterruptedException, ExecutionException { @Test public void testForceDeleteTimeout() throws InterruptedException, ExecutionException { - Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE).anyTimes(); - for (BlobInfo info : BLOB_LIST) { - EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true).anyTimes(); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage).anyTimes(); + for (Blob blob : blobList) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, blob.name())).andReturn(true).anyTimes(); } EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(RETRYABLE_EXCEPTION).anyTimes(); EasyMock.replay(storageMock); @@ -125,10 +135,9 @@ public void testForceDeleteTimeout() throws InterruptedException, ExecutionExcep @Test public void testForceDeleteFail() throws InterruptedException, ExecutionException { - Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); - for (BlobInfo info : BLOB_LIST) { - EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage); + for (Blob blob : blobList) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, blob.name())).andReturn(true); } EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(FATAL_EXCEPTION); EasyMock.replay(storageMock); @@ -142,10 +151,9 @@ public void testForceDeleteFail() throws InterruptedException, ExecutionExceptio @Test public void testForceDeleteNoTimeout() { - Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); - for (BlobInfo info : BLOB_LIST) { - EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage); + for (Blob blob : blobList) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, blob.name())).andReturn(true); } EasyMock.expect(storageMock.delete(BUCKET_NAME)).andReturn(true); EasyMock.replay(storageMock); @@ -155,10 +163,9 @@ public void testForceDeleteNoTimeout() { @Test public void testForceDeleteNoTimeoutFail() { - Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); - for (BlobInfo info : BLOB_LIST) { - EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage); + for (Blob blob : blobList) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, blob.name())).andReturn(true); } EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(FATAL_EXCEPTION); EasyMock.replay(storageMock); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java index 8506e8b48f6b..0cea70116e0a 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java +++ 
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
@@ -42,23 +42,24 @@ public class SerializationTest {
+  private static final Storage STORAGE = StorageOptions.defaultInstance().service();
   private static final Acl.Domain ACL_DOMAIN = new Acl.Domain("domain");
   private static final Acl.Group ACL_GROUP = new Acl.Group("group");
-  private static final Acl.Project ACL_PROJECT_ = new Acl.Project(ProjectRole.VIEWERS, "pid");
+  private static final Acl.Project ACL_PROJECT = new Acl.Project(ProjectRole.VIEWERS, "pid");
   private static final Acl.User ACL_USER = new Acl.User("user");
   private static final Acl.RawEntity ACL_RAW = new Acl.RawEntity("raw");
-  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").build();
-  private static final BucketInfo BUCKET_INFO = BucketInfo.of("b");
+  private static final Blob BLOB = Blob.builder(STORAGE, "b", "n").build();
+  private static final Bucket BUCKET = Bucket.of(STORAGE, "b");
   private static final Cors.Origin ORIGIN = Cors.Origin.any();
   private static final Cors CORS =
       Cors.builder().maxAgeSeconds(1).origins(Collections.singleton(ORIGIN)).build();
   private static final BatchRequest BATCH_REQUEST =
       BatchRequest.builder().delete("B", "N").build();
   private static final BatchResponse BATCH_RESPONSE = new BatchResponse(
       Collections.singletonList(BatchResponse.Result.of(true)),
-      Collections.<BatchResponse.Result<BlobInfo>>emptyList(),
-      Collections.<BatchResponse.Result<BlobInfo>>emptyList());
-  private static final PageImpl<BlobInfo> PAGE_RESULT = new PageImpl<>(
-      null, "c", Collections.singletonList(BlobInfo.builder("b", "n").build()));
+      Collections.<BatchResponse.Result<Blob>>emptyList(),
+      Collections.<BatchResponse.Result<Blob>>emptyList());
+  private static final PageImpl<Blob> PAGE_RESULT =
+      new PageImpl<>(null, "c", Collections.singletonList(Blob.builder(STORAGE, "b", "n").build()));
   private static final Storage.BlobListOption BLOB_LIST_OPTIONS =
       Storage.BlobListOption.maxResults(100);
   private static final Storage.BlobSourceOption BLOB_SOURCE_OPTIONS =
@@ -94,8 +95,7 @@ public void testServiceOptions() throws Exception {

   @Test
   public void testModelAndRequests() throws Exception {
-    Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, BLOB_INFO,
-        BUCKET_INFO,
+    Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT, ACL_USER, ACL_RAW, BLOB, BUCKET,
         ORIGIN, CORS, BATCH_REQUEST, BATCH_RESPONSE, PAGE_RESULT, BLOB_LIST_OPTIONS,
         BLOB_SOURCE_OPTIONS, BLOB_TARGET_OPTIONS, BUCKET_LIST_OPTIONS, BUCKET_SOURCE_OPTIONS,
         BUCKET_TARGET_OPTIONS};
@@ -132,8 +132,8 @@ public void testWriteChannelState() throws IOException, ClassNotFoundException {
         .build();
     // avoid closing when you don't want partial writes to GCS upon failure
     @SuppressWarnings("resource")
-    BlobWriteChannel writer =
-        new BlobWriteChannel(options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id");
+    BlobWriteChannel writer = new BlobWriteChannel(
+        Blob.builder(options.service(), BlobId.of("b", "n")).build(), "upload-id");
     RestorableState<WriteChannel> state = writer.capture();
     RestorableState<WriteChannel> deserializedState = serializeAndDeserialize(state);
     assertEquals(state, deserializedState);
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
index 0e1f1a0b2f52..877b39504c69 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
@@ -84,17 +84,6 @@ public class StorageImplTest {
   private static final String
CONTENT_CRC32C = "9N3EPQ=="; private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; - // BucketInfo objects - private static final BucketInfo BUCKET_INFO1 = - BucketInfo.builder(BUCKET_NAME1).metageneration(42L).build(); - private static final BucketInfo BUCKET_INFO2 = BucketInfo.builder(BUCKET_NAME2).build(); - - // BlobInfo objects - private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1, 24L) - .metageneration(42L).contentType("application/json").md5("md5string").build(); - private static final BlobInfo BLOB_INFO2 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME2).build(); - private static final BlobInfo BLOB_INFO3 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME3).build(); - // Empty StorageRpc options private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); @@ -103,9 +92,7 @@ public class StorageImplTest { Storage.BucketTargetOption.metagenerationMatch(); private static final Storage.BucketTargetOption BUCKET_TARGET_PREDEFINED_ACL = Storage.BucketTargetOption.predefinedAcl(Storage.PredefinedAcl.PRIVATE); - private static final Map BUCKET_TARGET_OPTIONS = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_INFO1.metageneration(), - StorageRpc.Option.PREDEFINED_ACL, BUCKET_TARGET_PREDEFINED_ACL.value()); + private static Map bucketTargetOptions; // Blob target options (create, update, compose) private static final Storage.BlobTargetOption BLOB_TARGET_GENERATION = @@ -116,16 +103,9 @@ public class StorageImplTest { Storage.BlobTargetOption.doesNotExist(); private static final Storage.BlobTargetOption BLOB_TARGET_PREDEFINED_ACL = Storage.BlobTargetOption.predefinedAcl(Storage.PredefinedAcl.PRIVATE); - private static final Map BLOB_TARGET_OPTIONS_CREATE = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_INFO1.metageneration(), - StorageRpc.Option.IF_GENERATION_MATCH, 0L, - StorageRpc.Option.PREDEFINED_ACL, BUCKET_TARGET_PREDEFINED_ACL.value()); - private static final Map BLOB_TARGET_OPTIONS_UPDATE = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_INFO1.metageneration(), - StorageRpc.Option.PREDEFINED_ACL, BUCKET_TARGET_PREDEFINED_ACL.value()); - private static final Map BLOB_TARGET_OPTIONS_COMPOSE = ImmutableMap.of( - StorageRpc.Option.IF_GENERATION_MATCH, BLOB_INFO1.generation(), - StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_INFO1.metageneration()); + private static Map blobTargetOptionsCreate; + private static Map blobTargetOptionsUpdate; + private static Map blobTargetOptionsCompose; // Blob write options (create, writer) private static final Storage.BlobWriteOption BLOB_WRITE_METAGENERATION = @@ -140,45 +120,31 @@ public class StorageImplTest { Storage.BlobWriteOption.crc32cMatch(); // Bucket get/source options - private static final Storage.BucketSourceOption BUCKET_SOURCE_METAGENERATION = - Storage.BucketSourceOption.metagenerationMatch(BUCKET_INFO1.metageneration()); - private static final Map BUCKET_SOURCE_OPTIONS = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_SOURCE_METAGENERATION.value()); - private static final Storage.BucketGetOption BUCKET_GET_METAGENERATION = - Storage.BucketGetOption.metagenerationMatch(BUCKET_INFO1.metageneration()); + private static Storage.BucketSourceOption bucketSourceMetageneration; + private static Map bucketSourceOptions; + private static Storage.BucketGetOption bucketGetMetageneration; private static final Storage.BucketGetOption BUCKET_GET_FIELDS = Storage.BucketGetOption.fields(Storage.BucketField.LOCATION, Storage.BucketField.ACL); 
private static final Storage.BucketGetOption BUCKET_GET_EMPTY_FIELDS = Storage.BucketGetOption.fields(); - private static final Map BUCKET_GET_OPTIONS = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_SOURCE_METAGENERATION.value()); + private static Map bucketGetOptions; // Blob get/source options - private static final Storage.BlobGetOption BLOB_GET_METAGENERATION = - Storage.BlobGetOption.metagenerationMatch(BLOB_INFO1.metageneration()); - private static final Storage.BlobGetOption BLOB_GET_GENERATION = - Storage.BlobGetOption.generationMatch(BLOB_INFO1.generation()); + private static Storage.BlobGetOption blobGetMetageneration; + private static Storage.BlobGetOption blobGetGeneration; private static final Storage.BlobGetOption BLOB_GET_GENERATION_FROM_BLOB_ID = Storage.BlobGetOption.generationMatch(); private static final Storage.BlobGetOption BLOB_GET_FIELDS = Storage.BlobGetOption.fields(Storage.BlobField.CONTENT_TYPE, Storage.BlobField.CRC32C); private static final Storage.BlobGetOption BLOB_GET_EMPTY_FIELDS = Storage.BlobGetOption.fields(); - private static final Map BLOB_GET_OPTIONS = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_GET_METAGENERATION.value(), - StorageRpc.Option.IF_GENERATION_MATCH, BLOB_GET_GENERATION.value()); - private static final Storage.BlobSourceOption BLOB_SOURCE_METAGENERATION = - Storage.BlobSourceOption.metagenerationMatch(BLOB_INFO1.metageneration()); - private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION = - Storage.BlobSourceOption.generationMatch(BLOB_INFO1.generation()); + private static Map blobGetOptions; + private static Storage.BlobSourceOption blobSourceMetageneration; + private static Storage.BlobSourceOption blobSourceGeneration; private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION_FROM_BLOB_ID = Storage.BlobSourceOption.generationMatch(); - private static final Map BLOB_SOURCE_OPTIONS = ImmutableMap.of( - StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_SOURCE_METAGENERATION.value(), - StorageRpc.Option.IF_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value()); - private static final Map BLOB_SOURCE_OPTIONS_COPY = ImmutableMap.of( - StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH, BLOB_SOURCE_METAGENERATION.value(), - StorageRpc.Option.IF_SOURCE_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value()); + private static Map blobSourceOptions; + private static Map blobSourceOptionsCopy; // Bucket list options private static final Storage.BucketListOption BUCKET_LIST_MAX_RESULT = @@ -206,6 +172,44 @@ public class StorageImplTest { StorageRpc.Option.MAX_RESULTS, BLOB_LIST_MAX_RESULT.value(), StorageRpc.Option.PREFIX, BLOB_LIST_PREFIX.value()); + private void initializeGenerationOptions() { + bucketTargetOptions = + ImmutableMap.of(StorageRpc.Option.IF_METAGENERATION_MATCH, bucket1.metageneration(), + StorageRpc.Option.PREDEFINED_ACL, BUCKET_TARGET_PREDEFINED_ACL.value()); + blobTargetOptionsCreate = ImmutableMap.of(StorageRpc.Option.IF_METAGENERATION_MATCH, + blob1.metageneration(), StorageRpc.Option.IF_GENERATION_MATCH, 0L, + StorageRpc.Option.PREDEFINED_ACL, BUCKET_TARGET_PREDEFINED_ACL.value()); + blobTargetOptionsUpdate = + ImmutableMap.of(StorageRpc.Option.IF_METAGENERATION_MATCH, blob1.metageneration(), + StorageRpc.Option.PREDEFINED_ACL, BUCKET_TARGET_PREDEFINED_ACL.value()); + blobTargetOptionsCompose = + ImmutableMap.of(StorageRpc.Option.IF_GENERATION_MATCH, blob1.generation(), + StorageRpc.Option.IF_METAGENERATION_MATCH, blob1.metageneration()); + bucketSourceMetageneration = 
+ Storage.BucketSourceOption.metagenerationMatch(bucket1.metageneration()); + bucketSourceOptions = ImmutableMap.of( + StorageRpc.Option.IF_METAGENERATION_MATCH, bucketSourceMetageneration.value()); + bucketGetMetageneration = + Storage.BucketGetOption.metagenerationMatch(bucket1.metageneration()); + bucketGetOptions = ImmutableMap.of( + StorageRpc.Option.IF_METAGENERATION_MATCH, bucketSourceMetageneration.value()); + + blobGetMetageneration = Storage.BlobGetOption.metagenerationMatch(blob1.metageneration()); + blobGetGeneration = Storage.BlobGetOption.generationMatch(blob1.generation()); + blobGetOptions = + ImmutableMap.of(StorageRpc.Option.IF_METAGENERATION_MATCH, blobGetMetageneration.value(), + StorageRpc.Option.IF_GENERATION_MATCH, blobGetGeneration.value()); + blobSourceMetageneration = + Storage.BlobSourceOption.metagenerationMatch(blob1.metageneration()); + blobSourceGeneration = Storage.BlobSourceOption.generationMatch(blob1.generation()); + blobSourceOptions = + ImmutableMap.of(StorageRpc.Option.IF_METAGENERATION_MATCH, blobSourceMetageneration.value(), + StorageRpc.Option.IF_GENERATION_MATCH, blobSourceGeneration.value()); + blobSourceOptionsCopy = ImmutableMap.of(StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH, + blobSourceMetageneration.value(), StorageRpc.Option.IF_SOURCE_GENERATION_MATCH, + blobSourceGeneration.value()); + } + private static final String PRIVATE_KEY_STRING = "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoG" + "BAL2xolH1zrISQ8+GzOV29BNjjzq4/HIP8Psd1+cZb81vDklSF+95wB250MSE0BDc81pvIMwj5OmIfLg1NY6uB" + "1xavOPpVdx1z664AGc/BEJ1zInXGXaQ6s+SxGenVq40Yws57gikQGMZjttpf1Qbz4DjkxsbRoeaRHn06n9pH1e" @@ -237,6 +241,19 @@ public long millis() { private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; private Storage storage; + private Storage mockStorage; + + + // Bucket objects + private Bucket bucket1; + private Bucket expectedBucket1; + private Bucket bucket2; + + // Blob objects + private Blob blob1; + private Blob expectedBlob1; + private Blob blob2; + private Blob blob3; @Rule public ExpectedException thrown = ExpectedException.none(); @@ -257,7 +274,8 @@ public void setUp() { rpcFactoryMock = EasyMock.createMock(StorageRpcFactory.class); storageRpcMock = EasyMock.createMock(StorageRpc.class); EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(StorageOptions.class))) - .andReturn(storageRpcMock); + .andReturn(storageRpcMock) + .anyTimes(); EasyMock.replay(rpcFactoryMock); options = StorageOptions.builder() .projectId("projectId") @@ -265,6 +283,30 @@ public void setUp() { .serviceRpcFactory(rpcFactoryMock) .retryParams(RetryParams.noRetries()) .build(); + mockStorage = EasyMock.createMock(Storage.class); + EasyMock.expect(mockStorage.options()).andReturn(options).anyTimes(); + EasyMock.replay(mockStorage); + expectedBucket1 = Bucket.builder(mockStorage, BUCKET_NAME1).metageneration(42L).build(); + bucket2 = Bucket.builder(mockStorage, BUCKET_NAME2).build(); + expectedBlob1 = + Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME1, 24L) + .metageneration(42L) + .contentType("application/json") + .md5("md5string") + .build(); + blob2 = Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME2).build(); + blob3 = Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME3).build(); + } + + private void initializeObjectsWithServiceDependencies() { + storage = options.service(); + bucket1 = Bucket.builder(storage, BUCKET_NAME1).metageneration(42L).build(); + blob1 = Blob.builder(storage, BUCKET_NAME1, BLOB_NAME1, 24L) + .metageneration(42L) + 
.contentType("application/json") + .md5("md5string") + .build(); + initializeGenerationOptions(); } @After @@ -281,37 +323,40 @@ public void testGetOptions() { @Test public void testCreateBucket() { - EasyMock.expect(storageRpcMock.create(BUCKET_INFO1.toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(BUCKET_INFO1.toPb()); + EasyMock.expect(storageRpcMock.create(expectedBucket1.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(expectedBucket1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = storage.create(BUCKET_INFO1); - assertEquals(BUCKET_INFO1.toPb(), bucket.toPb()); + initializeObjectsWithServiceDependencies(); + Bucket bucket = storage.create(bucket1); + assertEquals(bucket1.toPb(), bucket.toPb()); } @Test public void testCreateBucketWithOptions() { - EasyMock.expect(storageRpcMock.create(BUCKET_INFO1.toPb(), BUCKET_TARGET_OPTIONS)) - .andReturn(BUCKET_INFO1.toPb()); + EasyMock.expect(storageRpcMock.create(expectedBucket1.toPb(), bucketTargetOptions)) + .andReturn(expectedBucket1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = - storage.create(BUCKET_INFO1, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL); - assertEquals(BUCKET_INFO1, bucket); + initializeObjectsWithServiceDependencies(); + Bucket bucket = + storage.create(bucket1, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL); + assertEquals(bucket1, bucket); } @Test public void testCreateBlob() throws IOException { Capture capturedStream = Capture.newInstance(); - EasyMock.expect(storageRpcMock.create( - EasyMock.eq(BLOB_INFO1.toBuilder().md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build().toPb()), + EasyMock.expect(storageRpcMock.create(EasyMock.eq(expectedBlob1.toBuilder() + .md5(CONTENT_MD5) + .crc32c(CONTENT_CRC32C) + .build() + .toPb()), EasyMock.capture(capturedStream), EasyMock.eq(EMPTY_RPC_OPTIONS))) - .andReturn(BLOB_INFO1.toPb()); + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.create(BLOB_INFO1, BLOB_CONTENT); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.create(blob1, BLOB_CONTENT); + assertEquals(blob1, blob); ByteArrayInputStream byteStream = capturedStream.getValue(); byte[] streamBytes = new byte[BLOB_CONTENT.length]; assertEquals(BLOB_CONTENT.length, byteStream.read(streamBytes)); @@ -323,18 +368,18 @@ public void testCreateBlob() throws IOException { public void testCreateEmptyBlob() throws IOException { Capture capturedStream = Capture.newInstance(); EasyMock.expect(storageRpcMock.create( - EasyMock.eq(BLOB_INFO1.toBuilder() +EasyMock.eq(expectedBlob1.toBuilder() .md5("1B2M2Y8AsgTpgAmY7PhCfg==") .crc32c("AAAAAA==") .build() .toPb()), EasyMock.capture(capturedStream), EasyMock.eq(EMPTY_RPC_OPTIONS))) - .andReturn(BLOB_INFO1.toPb()); + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.create(BLOB_INFO1); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.create(blob1); + assertEquals(blob1, blob); ByteArrayInputStream byteStream = capturedStream.getValue(); byte[] streamBytes = new byte[BLOB_CONTENT.length]; assertEquals(-1, byteStream.read(streamBytes)); @@ -343,21 +388,18 @@ public void testCreateEmptyBlob() throws IOException { @Test public void testCreateBlobWithOptions() throws IOException { Capture capturedStream = Capture.newInstance(); - 
EasyMock.expect(storageRpcMock.create( - EasyMock.eq(BLOB_INFO1.toBuilder() - .md5(CONTENT_MD5) - .crc32c(CONTENT_CRC32C) - .build() - .toPb()), - EasyMock.capture(capturedStream), - EasyMock.eq(BLOB_TARGET_OPTIONS_CREATE))) - .andReturn(BLOB_INFO1.toPb()); + EasyMock + .expect(storageRpcMock.create( + EasyMock.eq( + expectedBlob1.toBuilder().md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build().toPb()), + EasyMock.capture(capturedStream), EasyMock.eq(blobTargetOptionsCreate))) + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = - storage.create(BLOB_INFO1, BLOB_CONTENT, BLOB_TARGET_METAGENERATION, BLOB_TARGET_NOT_EXIST, + initializeObjectsWithServiceDependencies(); + Blob blob = + storage.create(blob1, BLOB_CONTENT, BLOB_TARGET_METAGENERATION, BLOB_TARGET_NOT_EXIST, BLOB_TARGET_PREDEFINED_ACL); - assertEquals(BLOB_INFO1, blob); + assertEquals(blob1, blob); ByteArrayInputStream byteStream = capturedStream.getValue(); byte[] streamBytes = new byte[BLOB_CONTENT.length]; assertEquals(BLOB_CONTENT.length, byteStream.read(streamBytes)); @@ -368,161 +410,169 @@ public void testCreateBlobWithOptions() throws IOException { @Test public void testCreateBlobFromStream() { ByteArrayInputStream fileStream = new ByteArrayInputStream(BLOB_CONTENT); - BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder(); - BlobInfo infoWithHashes = infoBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); - BlobInfo infoWithoutHashes = infoBuilder.md5(null).crc32c(null).build(); - EasyMock.expect(storageRpcMock.create(infoWithoutHashes.toPb(), fileStream, EMPTY_RPC_OPTIONS)) - .andReturn(BLOB_INFO1.toPb()); + Blob.Builder blobBuilder = expectedBlob1.toBuilder(); + Blob blobWithHashes = blobBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); + Blob blobWithoutHashes = blobBuilder.md5(null).crc32c(null).build(); + EasyMock.expect(storageRpcMock.create(blobWithoutHashes.toPb(), fileStream, EMPTY_RPC_OPTIONS)) + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.create(infoWithHashes, fileStream); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.create(blobWithHashes, fileStream); + assertEquals(blob1, blob); } @Test public void testGetBucket() { - EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(BUCKET_INFO1.toPb()); + EasyMock + .expect(storageRpcMock.get(Bucket.of(mockStorage, BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(expectedBucket1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = storage.get(BUCKET_NAME1); - assertEquals(BUCKET_INFO1, bucket); + initializeObjectsWithServiceDependencies(); + Bucket bucket = storage.get(BUCKET_NAME1); + assertEquals(bucket1, bucket); } @Test public void testGetBucketWithOptions() { - EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_GET_OPTIONS)) - .andReturn(BUCKET_INFO1.toPb()); + EasyMock + .expect( +storageRpcMock.get(Bucket.of(mockStorage, BUCKET_NAME1).toPb(), bucketGetOptions)) + .andReturn(expectedBucket1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION); - assertEquals(BUCKET_INFO1, bucket); + initializeObjectsWithServiceDependencies(); + Bucket bucket = storage.get(BUCKET_NAME1, bucketGetMetageneration); + assertEquals(bucket1, bucket); } @Test public 
void testGetBucketWithSelectedFields() { Capture> capturedOptions = Capture.newInstance(); - EasyMock.expect(storageRpcMock.get(EasyMock.eq(BucketInfo.of(BUCKET_NAME1).toPb()), - EasyMock.capture(capturedOptions))).andReturn(BUCKET_INFO1.toPb()); + EasyMock + .expect(storageRpcMock.get( + EasyMock.eq(Bucket.of(mockStorage, BUCKET_NAME1).toPb()), + EasyMock.capture(capturedOptions))).andReturn(expectedBucket1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, BUCKET_GET_FIELDS); - assertEquals(BUCKET_GET_METAGENERATION.value(), - capturedOptions.getValue().get(BUCKET_GET_METAGENERATION.rpcOption())); + initializeObjectsWithServiceDependencies(); + Bucket bucket = storage.get(BUCKET_NAME1, bucketGetMetageneration, BUCKET_GET_FIELDS); + assertEquals( + bucketGetMetageneration.value(), + capturedOptions.getValue().get(bucketGetMetageneration.rpcOption())); String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption()); assertTrue(selector.contains("name")); assertTrue(selector.contains("location")); assertTrue(selector.contains("acl")); assertEquals(17, selector.length()); - assertEquals(BUCKET_INFO1.name(), bucket.name()); + assertEquals(bucket1.name(), bucket.name()); } @Test public void testGetBucketWithEmptyFields() { Capture> capturedOptions = Capture.newInstance(); - EasyMock.expect(storageRpcMock.get(EasyMock.eq(BucketInfo.of(BUCKET_NAME1).toPb()), - EasyMock.capture(capturedOptions))).andReturn(BUCKET_INFO1.toPb()); + EasyMock + .expect(storageRpcMock.get( + EasyMock.eq(Bucket.of(mockStorage, BUCKET_NAME1).toPb()), + EasyMock.capture(capturedOptions))).andReturn(expectedBucket1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, + initializeObjectsWithServiceDependencies(); + Bucket bucket = storage.get(BUCKET_NAME1, bucketGetMetageneration, BUCKET_GET_EMPTY_FIELDS); - assertEquals(BUCKET_GET_METAGENERATION.value(), - capturedOptions.getValue().get(BUCKET_GET_METAGENERATION.rpcOption())); + assertEquals( + bucketGetMetageneration.value(), + capturedOptions.getValue().get(bucketGetMetageneration.rpcOption())); String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption()); assertTrue(selector.contains("name")); assertEquals(4, selector.length()); - assertEquals(BUCKET_INFO1.name(), bucket.name()); + assertEquals(bucket1.name(), bucket.name()); } @Test public void testGetBlob() { EasyMock.expect( storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(BLOB_INFO1.toPb()); + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.get(BUCKET_NAME1, BLOB_NAME1); + assertEquals(blob1, blob); } @Test public void testGetBlobWithOptions() { - EasyMock.expect( - storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_GET_OPTIONS)) - .andReturn(BLOB_INFO1.toPb()); + EasyMock.expect(storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), blobGetOptions)) + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = - storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION, BLOB_GET_GENERATION); - assertEquals(BLOB_INFO1, blob); + 
initializeObjectsWithServiceDependencies(); + Blob blob = storage.get(BUCKET_NAME1, BLOB_NAME1, blobGetMetageneration, blobGetGeneration); + assertEquals(blob1, blob); } @Test public void testGetBlobWithOptionsFromBlobId() { - EasyMock.expect( - storageRpcMock.get(BLOB_INFO1.blobId().toPb(), BLOB_GET_OPTIONS)) - .andReturn(BLOB_INFO1.toPb()); + EasyMock.expect(storageRpcMock.get(expectedBlob1.blobId().toPb(), blobGetOptions)) + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = - storage.get(BLOB_INFO1.blobId(), BLOB_GET_METAGENERATION, BLOB_GET_GENERATION_FROM_BLOB_ID); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = + storage.get(blob1.blobId(), blobGetMetageneration, BLOB_GET_GENERATION_FROM_BLOB_ID); + assertEquals(blob1, blob); } @Test public void testGetBlobWithSelectedFields() { Capture> capturedOptions = Capture.newInstance(); EasyMock.expect(storageRpcMock.get(EasyMock.eq(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()), - EasyMock.capture(capturedOptions))).andReturn(BLOB_INFO1.toPb()); + EasyMock.capture(capturedOptions))).andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION, - BLOB_GET_GENERATION, BLOB_GET_FIELDS); - assertEquals(BLOB_GET_METAGENERATION.value(), - capturedOptions.getValue().get(BLOB_GET_METAGENERATION.rpcOption())); - assertEquals(BLOB_GET_GENERATION.value(), - capturedOptions.getValue().get(BLOB_GET_GENERATION.rpcOption())); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.get( + BUCKET_NAME1, BLOB_NAME1, blobGetMetageneration, blobGetGeneration, BLOB_GET_FIELDS); + assertEquals( + blobGetMetageneration.value(), + capturedOptions.getValue().get(blobGetMetageneration.rpcOption())); + assertEquals( + blobGetGeneration.value(), capturedOptions.getValue().get(blobGetGeneration.rpcOption())); String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption()); assertTrue(selector.contains("bucket")); assertTrue(selector.contains("name")); assertTrue(selector.contains("contentType")); assertTrue(selector.contains("crc32c")); assertEquals(30, selector.length()); - assertEquals(BLOB_INFO1, blob); + assertEquals(blob1, blob); } @Test public void testGetBlobWithEmptyFields() { Capture> capturedOptions = Capture.newInstance(); EasyMock.expect(storageRpcMock.get(EasyMock.eq(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()), - EasyMock.capture(capturedOptions))).andReturn(BLOB_INFO1.toPb()); + EasyMock.capture(capturedOptions))).andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION, - BLOB_GET_GENERATION, BLOB_GET_EMPTY_FIELDS); - assertEquals(BLOB_GET_METAGENERATION.value(), - capturedOptions.getValue().get(BLOB_GET_METAGENERATION.rpcOption())); - assertEquals(BLOB_GET_GENERATION.value(), - capturedOptions.getValue().get(BLOB_GET_GENERATION.rpcOption())); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.get( + BUCKET_NAME1, BLOB_NAME1, blobGetMetageneration, blobGetGeneration, BLOB_GET_EMPTY_FIELDS); + assertEquals( + blobGetMetageneration.value(), + capturedOptions.getValue().get(blobGetMetageneration.rpcOption())); + assertEquals( + blobGetGeneration.value(), capturedOptions.getValue().get(blobGetGeneration.rpcOption())); String selector = (String) 
capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption()); assertTrue(selector.contains("bucket")); assertTrue(selector.contains("name")); assertEquals(11, selector.length()); - assertEquals(BLOB_INFO1, blob); + assertEquals(blob1, blob); } @Test public void testListBuckets() { String cursor = "cursor"; - ImmutableList bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); + ImmutableList bucketList = ImmutableList.of(expectedBucket1, bucket2); Tuple> result = - Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(bucketList, Bucket.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(EMPTY_RPC_OPTIONS)).andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class)); + assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class)); } @Test @@ -530,38 +580,37 @@ public void testListBucketsEmpty() { EasyMock.expect(storageRpcMock.list(EMPTY_RPC_OPTIONS)).andReturn( Tuple.>of(null, null)); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(); assertNull(page.nextPageCursor()); - assertArrayEquals(ImmutableList.of().toArray(), - Iterables.toArray(page.values(), BucketInfo.class)); + assertArrayEquals(ImmutableList.of().toArray(), Iterables.toArray(page.values(), Bucket.class)); } @Test public void testListBucketsWithOptions() { String cursor = "cursor"; - ImmutableList bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); + ImmutableList bucketList = ImmutableList.of(expectedBucket1, bucket2); Tuple> result = - Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(bucketList, Bucket.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(BUCKET_LIST_OPTIONS)).andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(BUCKET_LIST_MAX_RESULT, BUCKET_LIST_PREFIX); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_LIST_MAX_RESULT, BUCKET_LIST_PREFIX); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class)); + assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class)); } @Test public void testListBucketsWithSelectedFields() { String cursor = "cursor"; Capture> capturedOptions = Capture.newInstance(); - ImmutableList bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); + ImmutableList bucketList = ImmutableList.of(expectedBucket1, bucket2); Tuple> result = - Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(bucketList, Bucket.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(EasyMock.capture(capturedOptions))).andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(BUCKET_LIST_FIELDS); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_LIST_FIELDS); String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption()); assertTrue(selector.contains("items")); assertTrue(selector.contains("name")); @@ 
-569,40 +618,40 @@ public void testListBucketsWithSelectedFields() { assertTrue(selector.contains("location")); assertEquals(24, selector.length()); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class)); + assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class)); } @Test public void testListBucketsWithEmptyFields() { String cursor = "cursor"; Capture> capturedOptions = Capture.newInstance(); - ImmutableList bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); + ImmutableList bucketList = ImmutableList.of(expectedBucket1, bucket2); Tuple> result = - Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(bucketList, Bucket.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(EasyMock.capture(capturedOptions))).andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(BUCKET_LIST_EMPTY_FIELDS); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_LIST_EMPTY_FIELDS); String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption()); assertTrue(selector.contains("items")); assertTrue(selector.contains("name")); assertEquals(11, selector.length()); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class)); + assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class)); } @Test public void testListBlobs() { String cursor = "cursor"; - ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); + ImmutableList blobList = ImmutableList.of(expectedBlob1, blob2); Tuple> result = - Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(blobList, Blob.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, EMPTY_RPC_OPTIONS)).andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(BUCKET_NAME1); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_NAME1); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class)); } @Test @@ -611,40 +660,39 @@ public void testListBlobsEmpty() { .andReturn(Tuple.>of( null, null)); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(BUCKET_NAME1); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_NAME1); assertNull(page.nextPageCursor()); - assertArrayEquals(ImmutableList.of().toArray(), - Iterables.toArray(page.values(), BlobInfo.class)); + assertArrayEquals(ImmutableList.of().toArray(), Iterables.toArray(page.values(), Blob.class)); } @Test public void testListBlobsWithOptions() { String cursor = "cursor"; - ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); + ImmutableList blobList = ImmutableList.of(expectedBlob1, blob2); Tuple> result = - Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(blobList, Blob.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, BLOB_LIST_OPTIONS)).andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = storage.list(BUCKET_NAME1, 
BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX); + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class)); } @Test public void testListBlobsWithSelectedFields() { String cursor = "cursor"; Capture> capturedOptions = Capture.newInstance(); - ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); + ImmutableList blobList = ImmutableList.of(expectedBlob1, blob2); Tuple> result = - Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(blobList, Blob.TO_PB_FUNCTION)); EasyMock.expect( storageRpcMock.list(EasyMock.eq(BUCKET_NAME1), EasyMock.capture(capturedOptions))) .andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_FIELDS); assertEquals(BLOB_LIST_MAX_RESULT.value(), capturedOptions.getValue().get(BLOB_LIST_MAX_RESULT.rpcOption())); @@ -658,22 +706,22 @@ public void testListBlobsWithSelectedFields() { assertTrue(selector.contains("md5Hash")); assertEquals(38, selector.length()); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class)); } @Test public void testListBlobsWithEmptyFields() { String cursor = "cursor"; Capture> capturedOptions = Capture.newInstance(); - ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); + ImmutableList blobList = ImmutableList.of(expectedBlob1, blob2); Tuple> result = - Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); + Tuple.of(cursor, Iterables.transform(blobList, Blob.TO_PB_FUNCTION)); EasyMock.expect( storageRpcMock.list(EasyMock.eq(BUCKET_NAME1), EasyMock.capture(capturedOptions))) .andReturn(result); EasyMock.replay(storageRpcMock); - storage = options.service(); - Page page = + initializeObjectsWithServiceDependencies(); + Page page = storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_EMPTY_FIELDS); assertEquals(BLOB_LIST_MAX_RESULT.value(), capturedOptions.getValue().get(BLOB_LIST_MAX_RESULT.rpcOption())); @@ -685,73 +733,75 @@ public void testListBlobsWithEmptyFields() { assertTrue(selector.contains("name")); assertEquals(18, selector.length()); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class)); } @Test public void testUpdateBucket() { - BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().indexPage("some-page").build(); - EasyMock.expect(storageRpcMock.patch(updatedBucketInfo.toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(updatedBucketInfo.toPb()); + Bucket updatedBucket = expectedBucket1.toBuilder().indexPage("some-page").build(); + EasyMock.expect(storageRpcMock.patch(updatedBucket.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedBucket.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = storage.update(updatedBucketInfo); - assertEquals(updatedBucketInfo, bucket); + 
initializeObjectsWithServiceDependencies(); + Bucket bucket = storage.update(updatedBucket); + assertEquals(updatedBucket, bucket); } @Test public void testUpdateBucketWithOptions() { - BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().indexPage("some-page").build(); - EasyMock.expect(storageRpcMock.patch(updatedBucketInfo.toPb(), BUCKET_TARGET_OPTIONS)) - .andReturn(updatedBucketInfo.toPb()); + Bucket updatedBucket = expectedBucket1.toBuilder().indexPage("some-page").build(); + EasyMock.expect(storageRpcMock.patch(updatedBucket.toPb(), bucketTargetOptions)) + .andReturn(updatedBucket.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BucketInfo bucket = - storage.update(updatedBucketInfo, BUCKET_TARGET_METAGENERATION, + initializeObjectsWithServiceDependencies(); + Bucket bucket = + storage.update(updatedBucket, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL); - assertEquals(updatedBucketInfo, bucket); + assertEquals(updatedBucket, bucket); } @Test public void testUpdateBlob() { - BlobInfo updatedBlobInfo = BLOB_INFO1.toBuilder().contentType("some-content-type").build(); - EasyMock.expect(storageRpcMock.patch(updatedBlobInfo.toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(updatedBlobInfo.toPb()); + Blob updatedBlob = expectedBlob1.toBuilder().contentType("some-content-type").build(); + EasyMock.expect(storageRpcMock.patch(updatedBlob.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedBlob.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.update(updatedBlobInfo); - assertEquals(updatedBlobInfo, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.update(updatedBlob); + assertEquals(updatedBlob, blob); } @Test public void testUpdateBlobWithOptions() { - BlobInfo updatedBlobInfo = BLOB_INFO1.toBuilder().contentType("some-content-type").build(); - EasyMock.expect(storageRpcMock.patch(updatedBlobInfo.toPb(), BLOB_TARGET_OPTIONS_UPDATE)) - .andReturn(updatedBlobInfo.toPb()); + Blob updatedBlob = expectedBlob1.toBuilder().contentType("some-content-type").build(); + EasyMock.expect(storageRpcMock.patch(updatedBlob.toPb(), blobTargetOptionsUpdate)) + .andReturn(updatedBlob.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = - storage.update(updatedBlobInfo, BLOB_TARGET_METAGENERATION, BLOB_TARGET_PREDEFINED_ACL); - assertEquals(updatedBlobInfo, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.update(updatedBlob, BLOB_TARGET_METAGENERATION, BLOB_TARGET_PREDEFINED_ACL); + assertEquals(updatedBlob, blob); } @Test public void testDeleteBucket() { - EasyMock.expect(storageRpcMock.delete(BucketInfo.of(BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS)) + EasyMock + .expect( + storageRpcMock.delete(Bucket.of(mockStorage, BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); assertTrue(storage.delete(BUCKET_NAME1)); } @Test public void testDeleteBucketWithOptions() { EasyMock - .expect(storageRpcMock.delete(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_SOURCE_OPTIONS)) + .expect( + storageRpcMock.delete(Bucket.of(mockStorage, BUCKET_NAME1).toPb(), bucketSourceOptions)) .andReturn(true); EasyMock.replay(storageRpcMock); - storage = options.service(); - assertTrue(storage.delete(BUCKET_NAME1, BUCKET_SOURCE_METAGENERATION)); + initializeObjectsWithServiceDependencies(); + assertTrue(storage.delete(BUCKET_NAME1, bucketSourceMetageneration)); } 
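Aside (not part of the patch): with BlobInfo merged into Blob, the update path exercised by the
tests above looks roughly like this from client code. A sketch assuming the builder, create, and
update signatures shown elsewhere in this diff; the bucket and object names are placeholders:

  import com.google.gcloud.storage.Blob;
  import com.google.gcloud.storage.Storage;
  import com.google.gcloud.storage.StorageOptions;

  public class UpdateBlobExample {
    public static void main(String... args) {
      Storage storage = StorageOptions.defaultInstance().service();
      // The model object now carries the service reference it was built with.
      Blob blob = Blob.builder(storage, "my-bucket", "my-object").build();
      Blob created = storage.create(blob);
      // Change metadata through toBuilder() and persist it with update().
      Blob updated = storage.update(created.toBuilder().contentType("text/plain").build());
    }
  }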
@Test @@ -760,71 +810,77 @@ public void testDeleteBlob() { storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(true); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); assertTrue(storage.delete(BUCKET_NAME1, BLOB_NAME1)); } @Test public void testDeleteBlobWithOptions() { EasyMock.expect( - storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS)) + storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), blobSourceOptions)) .andReturn(true); EasyMock.replay(storageRpcMock); - storage = options.service(); - assertTrue(storage.delete(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_GENERATION, - BLOB_SOURCE_METAGENERATION)); + initializeObjectsWithServiceDependencies(); + assertTrue( + storage.delete(BUCKET_NAME1, BLOB_NAME1, blobSourceGeneration, blobSourceMetageneration)); } @Test public void testDeleteBlobWithOptionsFromBlobId() { - EasyMock.expect( - storageRpcMock.delete(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS)) + EasyMock.expect(storageRpcMock.delete(expectedBlob1.blobId().toPb(), blobSourceOptions)) .andReturn(true); EasyMock.replay(storageRpcMock); - storage = options.service(); - assertTrue(storage.delete(BLOB_INFO1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, - BLOB_SOURCE_METAGENERATION)); + initializeObjectsWithServiceDependencies(); + assertTrue(storage.delete( + blob1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, blobSourceMetageneration)); } @Test public void testCompose() { Storage.ComposeRequest req = Storage.ComposeRequest.builder() .addSource(BLOB_NAME2, BLOB_NAME3) - .target(BLOB_INFO1) + .target(expectedBlob1) .build(); - EasyMock.expect(storageRpcMock.compose(ImmutableList.of(BLOB_INFO2.toPb(), BLOB_INFO3.toPb()), - BLOB_INFO1.toPb(), EMPTY_RPC_OPTIONS)).andReturn(BLOB_INFO1.toPb()); + EasyMock + .expect(storageRpcMock.compose( + ImmutableList.of(blob2.toPb(), blob3.toPb()), + expectedBlob1.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.compose(req); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.compose(req); + assertEquals(blob1, blob); } @Test public void testComposeWithOptions() { Storage.ComposeRequest req = Storage.ComposeRequest.builder() .addSource(BLOB_NAME2, BLOB_NAME3) - .target(BLOB_INFO1) + .target(expectedBlob1) .targetOptions(BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION) .build(); - EasyMock.expect(storageRpcMock.compose(ImmutableList.of(BLOB_INFO2.toPb(), BLOB_INFO3.toPb()), - BLOB_INFO1.toPb(), BLOB_TARGET_OPTIONS_COMPOSE)).andReturn(BLOB_INFO1.toPb()); + EasyMock + .expect(storageRpcMock.compose( + ImmutableList.of(blob2.toPb(), blob3.toPb()), + expectedBlob1.toPb(), blobTargetOptionsCompose)) + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); - storage = options.service(); - BlobInfo blob = storage.compose(req); - assertEquals(BLOB_INFO1, blob); + initializeObjectsWithServiceDependencies(); + Blob blob = storage.compose(req); + assertEquals(blob1, blob); } @Test public void testCopy() { - CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId()); + CopyRequest request = + Storage.CopyRequest.of(mockStorage, expectedBlob1.blobId(), blob2.blobId()); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(), EMPTY_RPC_OPTIONS, request.target().toPb(), 
EMPTY_RPC_OPTIONS, null); StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L); EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); CopyWriter writer = storage.copy(request); assertEquals(42L, writer.blobSize()); assertEquals(21L, writer.totalBytesCopied()); @@ -834,17 +890,17 @@ public void testCopy() { @Test public void testCopyWithOptions() { CopyRequest request = Storage.CopyRequest.builder() - .source(BLOB_INFO2.blobId()) - .sourceOptions(BLOB_SOURCE_GENERATION, BLOB_SOURCE_METAGENERATION) - .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION) + .source(blob2.blobId()) + .sourceOptions(blobSourceGeneration, blobSourceMetageneration) + .target(expectedBlob1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION) .build(); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(), - BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null); + blobSourceOptionsCopy, request.target().toPb(), blobTargetOptionsCompose, null); StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L); EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); CopyWriter writer = storage.copy(request); assertEquals(42L, writer.blobSize()); assertEquals(21L, writer.totalBytesCopied()); @@ -854,17 +910,17 @@ public void testCopyWithOptions() { @Test public void testCopyWithOptionsFromBlobId() { CopyRequest request = Storage.CopyRequest.builder() - .source(BLOB_INFO1.blobId()) - .sourceOptions(BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION) - .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION) + .source(expectedBlob1.blobId()) + .sourceOptions(BLOB_SOURCE_GENERATION_FROM_BLOB_ID, blobSourceMetageneration) + .target(expectedBlob1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION) .build(); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(), - BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null); + blobSourceOptionsCopy, request.target().toPb(), blobTargetOptionsCompose, null); StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L); EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); CopyWriter writer = storage.copy(request); assertEquals(42L, writer.blobSize()); assertEquals(21L, writer.totalBytesCopied()); @@ -873,22 +929,23 @@ public void testCopyWithOptionsFromBlobId() { @Test public void testCopyMultipleRequests() { - CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId()); + CopyRequest request = + Storage.CopyRequest.of(mockStorage, expectedBlob1.blobId(), blob2.blobId()); StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(), EMPTY_RPC_OPTIONS, request.target().toPb(), EMPTY_RPC_OPTIONS, null); StorageRpc.RewriteResponse rpcResponse1 = new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L); - StorageRpc.RewriteResponse rpcResponse2 = new 
StorageRpc.RewriteResponse(rpcRequest, - BLOB_INFO1.toPb(), 42L, true, "token", 42L); + StorageRpc.RewriteResponse rpcResponse2 = + new StorageRpc.RewriteResponse(rpcRequest, expectedBlob1.toPb(), 42L, true, "token", 42L); EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse1); EasyMock.expect(storageRpcMock.continueRewrite(rpcResponse1)).andReturn(rpcResponse2); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); CopyWriter writer = storage.copy(request); assertEquals(42L, writer.blobSize()); assertEquals(21L, writer.totalBytesCopied()); assertTrue(!writer.isDone()); - assertEquals(BLOB_INFO1, writer.result()); + assertEquals(blob1, writer.result()); assertTrue(writer.isDone()); assertEquals(42L, writer.totalBytesCopied()); assertEquals(42L, writer.blobSize()); @@ -900,7 +957,7 @@ public void testReadAllBytes() { storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(BLOB_CONTENT); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); byte[] readBytes = storage.readAllBytes(BUCKET_NAME1, BLOB_NAME1); assertArrayEquals(BLOB_CONTENT, readBytes); } @@ -908,24 +965,23 @@ public void testReadAllBytes() { @Test public void testReadAllBytesWithOptions() { EasyMock.expect( - storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS)) + storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), blobSourceOptions)) .andReturn(BLOB_CONTENT); EasyMock.replay(storageRpcMock); - storage = options.service(); - byte[] readBytes = storage.readAllBytes(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_GENERATION, - BLOB_SOURCE_METAGENERATION); + initializeObjectsWithServiceDependencies(); + byte[] readBytes = storage.readAllBytes( + BUCKET_NAME1, BLOB_NAME1, blobSourceGeneration, blobSourceMetageneration); assertArrayEquals(BLOB_CONTENT, readBytes); } @Test public void testReadAllBytesWithOptionsFromBlobId() { - EasyMock.expect( - storageRpcMock.load(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS)) + EasyMock.expect(storageRpcMock.load(expectedBlob1.blobId().toPb(), blobSourceOptions)) .andReturn(BLOB_CONTENT); EasyMock.replay(storageRpcMock); - storage = options.service(); - byte[] readBytes = storage.readAllBytes(BLOB_INFO1.blobId(), - BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION); + initializeObjectsWithServiceDependencies(); + byte[] readBytes = storage.readAllBytes( + blob1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, blobSourceMetageneration); assertArrayEquals(BLOB_CONTENT, readBytes); } @@ -933,7 +989,7 @@ public void testReadAllBytesWithOptionsFromBlobId() { public void testApply() { BatchRequest req = BatchRequest.builder() .delete(BUCKET_NAME1, BLOB_NAME1) - .update(BLOB_INFO2) + .update(blob2) .get(BUCKET_NAME1, BLOB_NAME3) .build(); List toDelete = ImmutableList.of(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()); @@ -974,7 +1030,7 @@ public Tuple apply(StorageObject f) { Capture capturedBatchRequest = Capture.newInstance(); EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res); EasyMock.replay(storageRpcMock); - storage = options.service(); + initializeObjectsWithServiceDependencies(); BatchResponse batchResponse = storage.apply(req); // Verify captured StorageRpc.BatchRequest @@ -1012,7 +1068,7 @@ public Tuple apply(StorageObject f) { @Test public void testReader() { EasyMock.replay(storageRpcMock); - storage = options.service(); + 
initializeObjectsWithServiceDependencies(); ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1); assertNotNull(channel); assertTrue(channel.isOpen()); @@ -1021,13 +1077,12 @@ public void testReader() { @Test public void testReaderWithOptions() throws IOException { byte[] result = new byte[DEFAULT_CHUNK_SIZE]; - EasyMock.expect( - storageRpcMock.read(BLOB_INFO2.toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + EasyMock.expect(storageRpcMock.read(blob2.toPb(), blobSourceOptions, 0, DEFAULT_CHUNK_SIZE)) .andReturn(StorageRpc.Tuple.of("etag", result)); EasyMock.replay(storageRpcMock); - storage = options.service(); - ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION, - BLOB_SOURCE_METAGENERATION); + initializeObjectsWithServiceDependencies(); + ReadChannel channel = + storage.reader(BUCKET_NAME1, BLOB_NAME2, blobSourceGeneration, blobSourceMetageneration); assertNotNull(channel); assertTrue(channel.isOpen()); channel.read(ByteBuffer.allocate(42)); @@ -1036,13 +1091,13 @@ public void testReaderWithOptions() throws IOException { @Test public void testReaderWithOptionsFromBlobId() throws IOException { byte[] result = new byte[DEFAULT_CHUNK_SIZE]; - EasyMock.expect( - storageRpcMock.read(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + EasyMock.expect(storageRpcMock.read(expectedBlob1.blobId().toPb(), blobSourceOptions, 0, + DEFAULT_CHUNK_SIZE)) .andReturn(StorageRpc.Tuple.of("etag", result)); EasyMock.replay(storageRpcMock); - storage = options.service(); - ReadChannel channel = storage.reader(BLOB_INFO1.blobId(), - BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION); + initializeObjectsWithServiceDependencies(); + ReadChannel channel = storage.reader( + blob1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, blobSourceMetageneration); assertNotNull(channel); assertTrue(channel.isOpen()); channel.read(ByteBuffer.allocate(42)); @@ -1050,26 +1105,26 @@ public void testReaderWithOptionsFromBlobId() throws IOException { @Test public void testWriter() { - BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder(); - BlobInfo infoWithHashes = infoBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); - BlobInfo infoWithoutHashes = infoBuilder.md5(null).crc32c(null).build(); - EasyMock.expect(storageRpcMock.open(infoWithoutHashes.toPb(), EMPTY_RPC_OPTIONS)) + Blob.Builder blobBuilder = expectedBlob1.toBuilder(); + Blob blobWithHashes = blobBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); + Blob blobWithoutHashes = blobBuilder.md5(null).crc32c(null).build(); + EasyMock.expect(storageRpcMock.open(blobWithoutHashes.toPb(), EMPTY_RPC_OPTIONS)) .andReturn("upload-id"); EasyMock.replay(storageRpcMock); - storage = options.service(); - WriteChannel channel = storage.writer(infoWithHashes); + initializeObjectsWithServiceDependencies(); + WriteChannel channel = storage.writer(blobWithHashes); assertNotNull(channel); assertTrue(channel.isOpen()); } @Test public void testWriterWithOptions() { - BlobInfo info = BLOB_INFO1.toBuilder().md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); - EasyMock.expect(storageRpcMock.open(info.toPb(), BLOB_TARGET_OPTIONS_CREATE)) + Blob blob = expectedBlob1.toBuilder().md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); + EasyMock.expect(storageRpcMock.open(blob.toPb(), blobTargetOptionsCreate)) .andReturn("upload-id"); EasyMock.replay(storageRpcMock); - storage = options.service(); - WriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST, + 
initializeObjectsWithServiceDependencies(); + WriteChannel channel = storage.writer(blob, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST, BLOB_WRITE_PREDEFINED_ACL, BLOB_WRITE_CRC2C, BLOB_WRITE_MD5_HASH); assertNotNull(channel); assertTrue(channel.isOpen()); @@ -1079,10 +1134,11 @@ public void testWriterWithOptions() { public void testSignUrl() throws NoSuchAlgorithmException, InvalidKeyException, SignatureException, UnsupportedEncodingException { EasyMock.replay(storageRpcMock); + initializeObjectsWithServiceDependencies(); ServiceAccountAuthCredentials authCredentials = ServiceAccountAuthCredentials.createFor(ACCOUNT, privateKey); storage = options.toBuilder().authCredentials(authCredentials).build().service(); - URL url = storage.signUrl(BLOB_INFO1, 14, TimeUnit.DAYS); + URL url = storage.signUrl(blob1, 14, TimeUnit.DAYS); String stringUrl = url.toString(); String expectedUrl = new StringBuilder("https://storage.googleapis.com/").append(BUCKET_NAME1).append("/") @@ -1107,11 +1163,12 @@ public void testSignUrl() throws NoSuchAlgorithmException, InvalidKeyException, public void testSignUrlWithOptions() throws NoSuchAlgorithmException, InvalidKeyException, SignatureException, UnsupportedEncodingException { EasyMock.replay(storageRpcMock); + initializeObjectsWithServiceDependencies(); ServiceAccountAuthCredentials authCredentials = ServiceAccountAuthCredentials.createFor(ACCOUNT, privateKey); storage = options.toBuilder().authCredentials(authCredentials).build().service(); URL url = - storage.signUrl(BLOB_INFO1, 14, TimeUnit.DAYS, + storage.signUrl(blob1, 14, TimeUnit.DAYS, Storage.SignUrlOption.httpMethod(HttpMethod.POST), Storage.SignUrlOption.withContentType(), Storage.SignUrlOption.withMd5()); String stringUrl = url.toString(); @@ -1123,8 +1180,8 @@ public void testSignUrlWithOptions() throws NoSuchAlgorithmException, InvalidKey String signature = stringUrl.substring(expectedUrl.length()); StringBuilder signedMessageBuilder = new StringBuilder(); - signedMessageBuilder.append(HttpMethod.POST).append('\n').append(BLOB_INFO1.md5()).append('\n') - .append(BLOB_INFO1.contentType()).append('\n').append(42L + 1209600).append('\n') + signedMessageBuilder.append(HttpMethod.POST).append('\n').append(blob1.md5()).append('\n') + .append(blob1.contentType()).append('\n').append(42L + 1209600).append('\n') .append("/").append(BUCKET_NAME1).append("/").append(BLOB_NAME1); Signature signer = Signature.getInstance("SHA256withRSA"); @@ -1154,12 +1211,11 @@ public Tuple apply(StorageObject f) { StorageRpc.BatchResponse res = new StorageRpc.BatchResponse(deleteResult, updateResult, getResult); - Capture capturedBatchRequest = Capture.newInstance(); EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res); EasyMock.replay(storageRpcMock); - storage = options.service(); - List resultBlobs = storage.get(blobId1, blobId2); + initializeObjectsWithServiceDependencies(); + List resultBlobs = storage.get(blobId1, blobId2); // Verify captured StorageRpc.BatchRequest List>> capturedToGet = @@ -1179,10 +1235,12 @@ public Tuple apply(StorageObject f) { @Test public void testUpdateAll() { - BlobInfo blobInfo1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1).contentType("type").build(); - BlobInfo blobInfo2 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME2).contentType("type").build(); - StorageObject storageObject1 = blobInfo1.toPb(); - StorageObject storageObject2 = blobInfo2.toPb(); + Blob blob1 = + Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME1).contentType("type").build(); + Blob 
blob2 = + Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME2).contentType("type").build(); + StorageObject storageObject1 = blob1.toPb(); + StorageObject storageObject2 = blob2.toPb(); List toUpdate = ImmutableList.of(storageObject1, storageObject2); Map> deleteResult = ImmutableMap.of(); @@ -1197,12 +1255,11 @@ public Tuple apply(StorageObject f) { StorageRpc.BatchResponse res = new StorageRpc.BatchResponse(deleteResult, updateResult, getResult); - Capture capturedBatchRequest = Capture.newInstance(); EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res); EasyMock.replay(storageRpcMock); - storage = options.service(); - List resultBlobs = storage.update(blobInfo1, blobInfo2); + initializeObjectsWithServiceDependencies(); + List resultBlobs = storage.update(blob1, blob2); // Verify captured StorageRpc.BatchRequest List>> capturedToUpdate = @@ -1222,10 +1279,10 @@ public Tuple apply(StorageObject f) { @Test public void testDeleteAll() { - BlobInfo blobInfo1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1).build(); - BlobInfo blobInfo2 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME2).build(); - StorageObject storageObject1 = blobInfo1.toPb(); - StorageObject storageObject2 = blobInfo2.toPb(); + Blob blob1 = Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME1).build(); + Blob blob2 = Blob.builder(mockStorage, BUCKET_NAME1, BLOB_NAME2).build(); + StorageObject storageObject1 = blob1.toPb(); + StorageObject storageObject2 = blob2.toPb(); List toUpdate = ImmutableList.of(storageObject1, storageObject2); Map> updateResult = ImmutableMap.of(); @@ -1243,8 +1300,8 @@ public Tuple apply(StorageObject f) { Capture capturedBatchRequest = Capture.newInstance(); EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res); EasyMock.replay(storageRpcMock); - storage = options.service(); - List deleteResults = storage.delete(blobInfo1.blobId(), blobInfo2.blobId()); + initializeObjectsWithServiceDependencies(); + List deleteResults = storage.delete(blob1.blobId(), blob2.blobId()); // Verify captured StorageRpc.BatchRequest List>> capturedToDelete = @@ -1267,11 +1324,13 @@ public void testRetryableException() { BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1); EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS)) .andThrow(new StorageException(500, "InternalError", true)) - .andReturn(BLOB_INFO1.toPb()); + .andReturn(expectedBlob1.toPb()); EasyMock.replay(storageRpcMock); + initializeObjectsWithServiceDependencies(); storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); - BlobInfo readBlob = storage.get(blob); - assertEquals(BLOB_INFO1, readBlob); + blob1 = blob1.toBuilder().storage(storage).build(); + Blob readBlob = storage.get(blob); + assertEquals(blob1, readBlob); } @Test