From 2aa9e32f0e99cd40259133bf1a6f3e67673f7df5 Mon Sep 17 00:00:00 2001
From: Marco Ziccardi
Date: Mon, 5 Oct 2015 10:47:37 +0200
Subject: [PATCH 1/5] Add integration test config to pom.xml

---
 pom.xml | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/pom.xml b/pom.xml
index 660ca29cc15f..38219f41e796 100644
--- a/pom.xml
+++ b/pom.xml
@@ -99,6 +99,17 @@
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.18</version>
+        <configuration>
+          <excludes>
+            <exclude>**/IT*.java</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-enforcer-plugin</artifactId>

From f02bd975f09147d11f2cf9484d10cd88fc9bd570 Mon Sep 17 00:00:00 2001
From: Marco Ziccardi
Date: Mon, 5 Oct 2015 10:50:33 +0200
Subject: [PATCH 2/5] Add storage integration tests

---
 .../google/gcloud/storage/ITStorageTest.java   | 470 ++++++++++++++++++
 .../gcloud/storage/RemoteGcsHelper.java        |  90 ++++
 2 files changed, 560 insertions(+)
 create mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
 create mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java

diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
new file mode 100644
index 000000000000..714c680601f2
--- /dev/null
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
@@ -0,0 +1,470 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.storage;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.common.collect.ImmutableList;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Calendar;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+public class ITStorageTest {
+
+  private static StorageOptions options;
+  private static Storage storage;
+  private static RemoteGcsHelper gcsHelper;
+  private static String bucket;
+
+  private static final String CONTENT_TYPE = "text/plain";
+  private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD};
+  private static final String BLOB_STRING_CONTENT = "Hello Google Cloud Storage!";
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @BeforeClass
+  public static void beforeClass() {
+    gcsHelper = RemoteGcsHelper.create();
+    if (gcsHelper != null) {
+      options = gcsHelper.options();
+      storage = StorageFactory.instance().get(options);
+      bucket = gcsHelper.bucket();
+      storage.create(BucketInfo.of(bucket));
+    }
+  }
+
+  @AfterClass
+  public static void afterClass() {
+    if (storage != null) {
+      for (BlobInfo info : storage.list(bucket)) {
+        storage.delete(bucket, info.name());
+      }
+      storage.delete(bucket);
+    }
+  }
+
+  @Before
+  public void beforeMethod() {
+    org.junit.Assume.assumeNotNull(storage);
+  }
+
+  @Test
+  public void testListBuckets() {
+    ListResult<BucketInfo> bucketList = storage.list(Storage.BucketListOption.prefix(bucket));
+    for (BucketInfo bucketInfo : bucketList) {
+      assertTrue(bucketInfo.name().startsWith(bucket));
+    }
+  }
+
+  @Test
+  public void testCreateBlob() {
+    String blobName = "test-create-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    BlobInfo remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
+    assertNotNull(remoteBlob);
+    assertEquals(blob.bucket(), remoteBlob.bucket());
+    assertEquals(blob.name(), remoteBlob.name());
+    byte[] readBytes = storage.readAllBytes(bucket, blobName);
+    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testCreateEmptyBlob() {
+    String blobName = "test-create-empty-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    BlobInfo remoteBlob = storage.create(blob);
+    assertNotNull(remoteBlob);
+    assertEquals(blob.bucket(), remoteBlob.bucket());
+    assertEquals(blob.name(), remoteBlob.name());
+    byte[] readBytes = storage.readAllBytes(bucket, blobName);
+    assertArrayEquals(new byte[0], readBytes);
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testCreateBlobStream() throws UnsupportedEncodingException {
+    String blobName = "test-create-blob-stream";
+    BlobInfo blob = BlobInfo.builder(bucket, blobName).contentType(CONTENT_TYPE).build();
+    ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
+    BlobInfo remoteBlob = storage.create(blob, stream);
+    assertNotNull(remoteBlob);
+    assertEquals(blob.bucket(), remoteBlob.bucket());
+    assertEquals(blob.name(), remoteBlob.name());
+    assertEquals(blob.contentType(), remoteBlob.contentType());
+    byte[] readBytes = storage.readAllBytes(bucket, blobName);
+    assertEquals(BLOB_STRING_CONTENT, new String(readBytes, UTF_8));
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testCreateBlobFail() {
+    String blobName = "test-create-blob-fail";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(blob));
+    try {
+      storage.create(blob.toBuilder().generation(42L).build(), BLOB_BYTE_CONTENT,
+          Storage.BlobTargetOption.generationMatch());
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testUpdateBlob() {
+    String blobName = "test-update-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(blob));
+    BlobInfo updatedBlob = storage.update(blob.toBuilder().contentType(CONTENT_TYPE).build());
+    assertNotNull(updatedBlob);
+    assertEquals(blob.bucket(), updatedBlob.bucket());
+    assertEquals(blob.name(), updatedBlob.name());
+    assertEquals(CONTENT_TYPE, updatedBlob.contentType());
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testUpdateBlobFail() {
+    String blobName = "test-update-blob-fail";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(blob));
+    try {
+      storage.update(blob.toBuilder().contentType(CONTENT_TYPE).generation(42L).build(),
+          Storage.BlobTargetOption.generationMatch());
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testDeleteNonExistingBlob() {
+    String blobName = "test-delete-non-existing-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertTrue(!storage.delete(bucket, blob.name()));
+  }
+
+  @Test
+  public void testDeleteBlobFail() {
+    String blobName = "test-delete-blob-fail";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(blob));
+    try {
+      storage.delete(bucket, blob.name(), Storage.BlobSourceOption.generationMatch(42L));
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(bucket, blob.name()));
+  }
+
+  @Test
+  public void testComposeBlob() {
+    String sourceBlobName1 = "test-compose-blob-source-1";
+    String sourceBlobName2 = "test-compose-blob-source-2";
+    BlobInfo sourceBlob1 = BlobInfo.of(bucket, sourceBlobName1);
+    BlobInfo sourceBlob2 = BlobInfo.of(bucket, sourceBlobName2);
+    assertNotNull(storage.create(sourceBlob1, BLOB_BYTE_CONTENT));
+    assertNotNull(storage.create(sourceBlob2, BLOB_BYTE_CONTENT));
+    String targetBlobName = "test-compose-blob-target";
+    BlobInfo targetBlob = BlobInfo.of(bucket, targetBlobName);
+    Storage.ComposeRequest req =
+        Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob);
+    BlobInfo remoteBlob = storage.compose(req);
+    assertNotNull(remoteBlob);
+    assertEquals(bucket, remoteBlob.bucket());
+    assertEquals(targetBlobName, remoteBlob.name());
+    byte[] readBytes = storage.readAllBytes(bucket, targetBlobName);
+    byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2);
+    System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length,
+        BLOB_BYTE_CONTENT.length);
+    assertArrayEquals(composedBytes, readBytes);
+    assertTrue(storage.delete(bucket, sourceBlobName1));
+    assertTrue(storage.delete(bucket, sourceBlobName2));
+    assertTrue(storage.delete(bucket, targetBlobName));
+  }
+
+  @Test
+  public void testComposeBlobFail() {
+    String sourceBlobName1 = "test-compose-blob-fail-source-1";
+    String sourceBlobName2 = "test-compose-blob-fail-source-2";
+    BlobInfo sourceBlob1 = BlobInfo.of(bucket, sourceBlobName1);
+    BlobInfo sourceBlob2 = BlobInfo.of(bucket, sourceBlobName2);
+    assertNotNull(storage.create(sourceBlob1));
+    assertNotNull(storage.create(sourceBlob2));
+    String targetBlobName = "test-compose-blob-fail-target";
+    BlobInfo targetBlob = BlobInfo.of(bucket, targetBlobName);
+    Storage.ComposeRequest req = Storage.ComposeRequest.builder()
+        .addSource(sourceBlobName1, 42L)
+        .addSource(sourceBlobName2, 42L)
+        .target(targetBlob)
+        .build();
+    try {
+      storage.compose(req);
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(bucket, sourceBlobName1));
+    assertTrue(storage.delete(bucket, sourceBlobName2));
+  }
+
+  @Test
+  public void testCopyBlob() {
+    String sourceBlobName = "test-copy-blob-source";
+    BlobInfo blob = BlobInfo.of(bucket, sourceBlobName);
+    assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT));
+    String targetBlobName = "test-copy-blob-target";
+    Storage.CopyRequest req = Storage.CopyRequest.of(bucket, sourceBlobName, targetBlobName);
+    BlobInfo remoteBlob = storage.copy(req);
+    assertNotNull(remoteBlob);
+    assertEquals(bucket, remoteBlob.bucket());
+    assertEquals(targetBlobName, remoteBlob.name());
+    byte[] readBytes = storage.readAllBytes(bucket, targetBlobName);
+    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
+    assertTrue(storage.delete(bucket, sourceBlobName));
+    assertTrue(storage.delete(bucket, targetBlobName));
+  }
+
+  @Test
+  public void testCopyBlobUpdateMetadata() {
+    String sourceBlobName = "test-copy-blob-update-metadata-source";
+    BlobInfo sourceBlob = BlobInfo.of(bucket, sourceBlobName);
+    assertNotNull(storage.create(sourceBlob));
+    String targetBlobName = "test-copy-blob-update-metadata-target";
+    BlobInfo targetBlob =
+        BlobInfo.builder(bucket, targetBlobName).contentType(CONTENT_TYPE).build();
+    Storage.CopyRequest req = Storage.CopyRequest.of(bucket, sourceBlobName, targetBlob);
+    BlobInfo remoteBlob = storage.copy(req);
+    assertNotNull(remoteBlob);
+    assertEquals(bucket, remoteBlob.bucket());
+    assertEquals(targetBlobName, remoteBlob.name());
+    assertEquals(CONTENT_TYPE, remoteBlob.contentType());
+    assertTrue(storage.delete(bucket, sourceBlobName));
+    assertTrue(storage.delete(bucket, targetBlobName));
+  }
+
+  @Test
+  public void testCopyBlobFail() {
+    String sourceBlobName = "test-copy-blob-fail-source";
+    BlobInfo blob = BlobInfo.of(bucket, sourceBlobName);
+    assertNotNull(storage.create(blob));
+    String targetBlobName = "test-copy-blob-fail-target";
+    Storage.CopyRequest req = new Storage.CopyRequest.Builder()
+        .source(bucket, sourceBlobName)
+        .target(BlobInfo.builder(bucket, targetBlobName).build())
+        .sourceOptions(Storage.BlobSourceOption.metagenerationMatch(42L))
+        .build();
+    try {
+      storage.copy(req);
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(bucket, sourceBlobName));
+  }
+
+  @Test
+  public void testBatchRequest() {
+    String sourceBlobName1 = "test-batch-request-blob-1";
+    String sourceBlobName2 = "test-batch-request-blob-2";
+    BlobInfo sourceBlob1 = BlobInfo.of(bucket, sourceBlobName1);
+    BlobInfo sourceBlob2 = BlobInfo.of(bucket, sourceBlobName2);
+    assertNotNull(storage.create(sourceBlob1));
+    assertNotNull(storage.create(sourceBlob2));
+
+    // Batch update request
+    BlobInfo updatedBlob1 = sourceBlob1.toBuilder().contentType(CONTENT_TYPE).build();
+    BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build();
+    BatchRequest updateRequest = BatchRequest.builder()
+        .update(updatedBlob1)
+        .update(updatedBlob2)
+        .build();
+    BatchResponse updateResponse = storage.apply(updateRequest);
+    assertEquals(2, updateResponse.updates().size());
+    assertEquals(0, updateResponse.deletes().size());
+    assertEquals(0, updateResponse.gets().size());
+    BlobInfo remoteUpdatedBlob1 = updateResponse.updates().get(0).get();
+    BlobInfo remoteUpdatedBlob2 = updateResponse.updates().get(1).get();
+    assertEquals(bucket, remoteUpdatedBlob1.bucket());
+    assertEquals(bucket, remoteUpdatedBlob2.bucket());
+    assertEquals(updatedBlob1.name(), remoteUpdatedBlob1.name());
+    assertEquals(updatedBlob2.name(), remoteUpdatedBlob2.name());
+    assertEquals(updatedBlob1.contentType(), remoteUpdatedBlob1.contentType());
+    assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType());
+
+    // Batch get request
+    BatchRequest getRequest = BatchRequest.builder()
+        .get(bucket, sourceBlobName1)
+        .get(bucket, sourceBlobName2)
+        .build();
+    BatchResponse getResponse = storage.apply(getRequest);
+    assertEquals(2, getResponse.gets().size());
+    assertEquals(0, getResponse.deletes().size());
+    assertEquals(0, getResponse.updates().size());
+    BlobInfo remoteBlob1 = getResponse.gets().get(0).get();
+    BlobInfo remoteBlob2 = getResponse.gets().get(1).get();
+    assertEquals(remoteUpdatedBlob1, remoteBlob1);
+    assertEquals(remoteUpdatedBlob2, remoteBlob2);
+
+    // Batch delete request
+    BatchRequest deleteRequest = BatchRequest.builder()
+        .delete(bucket, sourceBlobName1)
+        .delete(bucket, sourceBlobName2)
+        .build();
+    BatchResponse deleteResponse = storage.apply(deleteRequest);
+    assertEquals(2, deleteResponse.deletes().size());
+    assertEquals(0, deleteResponse.gets().size());
+    assertEquals(0, deleteResponse.updates().size());
+    assertTrue(deleteResponse.deletes().get(0).get());
+    assertTrue(deleteResponse.deletes().get(1).get());
+  }
+
+  @Test
+  public void testBatchRequestFail() {
+    String blobName = "test-batch-request-blob-fail";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(blob));
+    BlobInfo updatedBlob = blob.toBuilder().generation(42L).build();
+    BatchRequest batchRequest = BatchRequest.builder()
+        .update(updatedBlob, Storage.BlobTargetOption.generationMatch())
+        .delete(bucket, blobName, Storage.BlobSourceOption.generationMatch(42L))
+        .get(bucket, blobName, Storage.BlobSourceOption.generationMatch(42L))
+        .build();
+    BatchResponse updateResponse = storage.apply(batchRequest);
+    assertEquals(1, updateResponse.updates().size());
+    assertEquals(1, updateResponse.deletes().size());
+    assertEquals(1, updateResponse.gets().size());
+    assertTrue(updateResponse.updates().get(0).failed());
+    assertTrue(updateResponse.gets().get(0).failed());
+    assertTrue(updateResponse.deletes().get(0).failed());
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testReadAndWriteChannels() throws UnsupportedEncodingException, IOException {
+    String blobName = "test-read-and-write-channels-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    byte[] stringBytes;
+    try (BlobWriteChannel writer = storage.writer(blob)) {
+      stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
+      writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
+      writer.write(ByteBuffer.wrap(stringBytes));
+    }
+    ByteBuffer readBytes;
+    ByteBuffer readStringBytes;
+    try (BlobReadChannel reader = storage.reader(bucket, blobName)) {
+      readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
+      readStringBytes = ByteBuffer.allocate(stringBytes.length);
+      reader.read(readBytes);
+      reader.read(readStringBytes);
+    }
+    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array());
+    assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8));
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testReadChannelFail() throws UnsupportedEncodingException, IOException {
+    String blobName = "test-read-channel-blob-fail";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(blob));
+    try (BlobReadChannel reader =
+        storage.reader(bucket, blobName, Storage.BlobSourceOption.metagenerationMatch(42L))) {
+      reader.read(ByteBuffer.allocate(42));
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(bucket, blobName));
+  }
+
+  @Test
+  public void testWriteChannelFail() throws UnsupportedEncodingException, IOException {
+    String blobName = "test-write-channel-blob-fail";
+    BlobInfo blob = BlobInfo.builder(bucket, blobName).generation(42L).build();
+    try {
+      try (BlobWriteChannel writer =
+          storage.writer(blob, Storage.BlobTargetOption.generationMatch())) {
+        writer.write(ByteBuffer.allocate(42));
+      }
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+  }
+
+  @Test
+  public void testGetSignedUrl() throws IOException {
+    String blobName = "test-get-signed-url-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(BlobInfo.of(bucket, blobName), BLOB_BYTE_CONTENT));
+    Calendar calendar = Calendar.getInstance();
+    calendar.add(Calendar.HOUR, 1);
+    long expiration = calendar.getTimeInMillis() / 1000;
+    URL url = storage.signUrl(blob, expiration);
+    URLConnection connection = url.openConnection();
+    byte[] readBytes = new byte[BLOB_BYTE_CONTENT.length];
+    try (InputStream responseStream = connection.getInputStream()) {
+      assertEquals(BLOB_BYTE_CONTENT.length, responseStream.read(readBytes));
+      assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
+      assertTrue(storage.delete(bucket, blobName));
+    }
+  }
+
+  @Test
+  public void testPostSignedUrl() throws IOException {
+    String blobName = "test-post-signed-url-blob";
+    BlobInfo blob = BlobInfo.of(bucket, blobName);
+    assertNotNull(storage.create(BlobInfo.of(bucket, blobName)));
+    Calendar calendar = Calendar.getInstance();
+    calendar.add(Calendar.HOUR, 1);
+    long expiration = calendar.getTimeInMillis() / 1000;
+    URL url = storage.signUrl(blob, expiration, Storage.SignUrlOption.httpMethod(HttpMethod.POST));
+    URLConnection connection = url.openConnection();
+    connection.setDoOutput(true);
+    connection.connect();
+    BlobInfo remoteBlob = storage.get(bucket, blobName);
+    assertNotNull(remoteBlob);
+    assertEquals(bucket, remoteBlob.bucket());
+    assertEquals(blob.name(), remoteBlob.name());
+    assertTrue(storage.delete(bucket, blobName));
+  }
+}
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java
new file mode 100644
index 000000000000..8a6ee21ce931
--- /dev/null
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.storage;
+
+import com.google.gcloud.AuthCredentials;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Utility to create a remote storage configuration for testing.
+ */
+public class RemoteGcsHelper {
+
+  private static final Logger log = Logger.getLogger(RemoteGcsHelper.class.getName());
+  private static final String BUCKET_NAME_PREFIX = "gcloud-test-bucket-temp-";
+  private static final String PROJECT_ID_ENV_VAR = "GCLOUD_TESTS_PROJECT_ID";
+  private static final String PRIVATE_KEY_ENV_VAR = "GCLOUD_TESTS_KEY";
+
+  private final StorageOptions options;
+  private final String bucket;
+
+  private RemoteGcsHelper(StorageOptions options, String bucket) {
+    this.options = options;
+    this.bucket = bucket;
+  }
+
+  public StorageOptions options() {
+    return options;
+  }
+
+  public String bucket() {
+    return bucket;
+  }
+
+  private static String generateBucketName() {
+    return BUCKET_NAME_PREFIX + UUID.randomUUID().toString();
+  }
+
+  public static RemoteGcsHelper create() {
+    if (System.getenv(PROJECT_ID_ENV_VAR) == null || System.getenv(PRIVATE_KEY_ENV_VAR) == null) {
+      if (log.isLoggable(Level.WARNING)) {
+        log.log(Level.INFO, "Environment variables {0} and {1} not set", new String[] {
+            PROJECT_ID_ENV_VAR, PRIVATE_KEY_ENV_VAR});
+      }
+      return null;
+    }
+    String projectId = System.getenv(PROJECT_ID_ENV_VAR);
+    String stringKeyPath = System.getenv(PRIVATE_KEY_ENV_VAR);
+    File keyFile = new File(stringKeyPath);
+    try {
+      InputStream keyFileStream = new FileInputStream(keyFile);
+      StorageOptions options = StorageOptions.builder()
+          .authCredentials(AuthCredentials.createForJson(keyFileStream))
+          .projectId(projectId)
+          .build();
+      return new RemoteGcsHelper(options, generateBucketName());
+    } catch (FileNotFoundException ex) {
+      if (log.isLoggable(Level.WARNING)) {
+        log.log(Level.WARNING, ex.getMessage());
+      }
+      return null;
+    } catch (IOException ex) {
+      if (log.isLoggable(Level.WARNING)) {
+        log.log(Level.WARNING, ex.getMessage());
+      }
+      return null;
+    }
+  }
+}
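Editorial note: the helper above is the piece other storage integration tests are expected to reuse. A minimal sketch of the intended bootstrap, as the API stands after this patch (the class name ITExampleTest is illustrative only and not part of the change; everything else mirrors ITStorageTest's own setup):

    import com.google.gcloud.storage.BucketInfo;
    import com.google.gcloud.storage.RemoteGcsHelper;
    import com.google.gcloud.storage.Storage;
    import com.google.gcloud.storage.StorageFactory;

    import org.junit.Assume;
    import org.junit.BeforeClass;

    public class ITExampleTest {

      private static Storage storage;
      private static String bucket;

      @BeforeClass
      public static void beforeClass() {
        // As of this patch, create() returns null when GCLOUD_TESTS_PROJECT_ID or
        // GCLOUD_TESTS_KEY is not set, so the suite is skipped rather than failed
        // on machines without credentials.
        RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
        Assume.assumeNotNull(gcsHelper);
        storage = StorageFactory.instance().get(gcsHelper.options());
        bucket = gcsHelper.bucket();
        storage.create(BucketInfo.of(bucket));
      }
    }

The next commit reworks this contract (generateBucketName becomes the entry point for bucket names and create() starts throwing), so treat the sketch as a snapshot of this revision only.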
From bfd4fc4c31398f498417e98d945594f51e28be89 Mon Sep 17 00:00:00 2001
From: Marco Ziccardi
Date: Tue, 6 Oct 2015 16:21:44 +0200
Subject: [PATCH 3/5] Refactor storage integration tests

- Add option to read keyfile with getResourceAsStream
- Use -1 instead of 42 for (meta)generation
- Add delay-timeout to listing
- Make generateBucketName public and static in RemoteGcsHelper
- Add static deleteBucketRecursively to RemoteGcsHelper
- RemoteGcsHelper.create throws exception if env variables not set
- Add javadoc to RemoteGcsHelper
---
 .../google/gcloud/storage/ITStorageTest.java   |  67 +++---
 .../gcloud/storage/RemoteGcsHelper.java        | 190 +++++++++++++++---
 2 files changed, 202 insertions(+), 55 deletions(-)

diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
index 714c680601f2..74833bd8fe3d 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
@@ -34,46 +34,44 @@
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Calendar;
+import java.util.Iterator;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 public class ITStorageTest {
 
-  private static StorageOptions options;
   private static Storage storage;
   private static RemoteGcsHelper gcsHelper;
-  private static String bucket;
+  private static final String bucket = RemoteGcsHelper.generateBucketName();
 
   private static final String CONTENT_TYPE = "text/plain";
   private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD};
   private static final String BLOB_STRING_CONTENT = "Hello Google Cloud Storage!";
 
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
   @BeforeClass
   public static void beforeClass() {
-    gcsHelper = RemoteGcsHelper.create();
-    if (gcsHelper != null) {
-      options = gcsHelper.options();
-      storage = StorageFactory.instance().get(options);
-      bucket = gcsHelper.bucket();
+    try {
+      gcsHelper = RemoteGcsHelper.create();
+      storage = StorageFactory.instance().get(gcsHelper.options());
       storage.create(BucketInfo.of(bucket));
+    } catch (RemoteGcsHelper.GcsHelperException e) {
+      // ignore
     }
   }
 
   @AfterClass
-  public static void afterClass() {
+  public static void afterClass()
+      throws ExecutionException, TimeoutException, InterruptedException {
     if (storage != null) {
-      for (BlobInfo info : storage.list(bucket)) {
-        storage.delete(bucket, info.name());
+      if (!RemoteGcsHelper.deleteBucketRecursively(storage, bucket, 5, TimeUnit.SECONDS)) {
+        throw new RuntimeException("Bucket deletion timed out. Could not delete non-empty bucket");
Could not delete non-empty bucket"); } - storage.delete(bucket); } } @@ -82,11 +80,16 @@ public void beforeMethod() { org.junit.Assume.assumeNotNull(storage); } - @Test - public void testListBuckets() { - ListResult bucketList = storage.list(Storage.BucketListOption.prefix(bucket)); - for (BucketInfo bucketInfo : bucketList) { - assertTrue(bucketInfo.name().startsWith(bucket)); + @Test(timeout = 5000) + public void testListBuckets() throws InterruptedException { + Iterator bucketIterator = + storage.list(Storage.BucketListOption.prefix(bucket)).iterator(); + while (!bucketIterator.hasNext()) { + Thread.sleep(500); + bucketIterator = storage.list(Storage.BucketListOption.prefix(bucket)).iterator(); + } + while (bucketIterator.hasNext()) { + assertTrue(bucketIterator.next().name().startsWith(bucket)); } } @@ -137,7 +140,7 @@ public void testCreateBlobFail() { BlobInfo blob = BlobInfo.of(bucket, blobName); assertNotNull(storage.create(blob)); try { - storage.create(blob.toBuilder().generation(42L).build(), BLOB_BYTE_CONTENT, + storage.create(blob.toBuilder().generation(-1L).build(), BLOB_BYTE_CONTENT, Storage.BlobTargetOption.generationMatch()); fail("StorageException was expected"); } catch (StorageException ex) { @@ -165,7 +168,7 @@ public void testUpdateBlobFail() { BlobInfo blob = BlobInfo.of(bucket, blobName); assertNotNull(storage.create(blob)); try { - storage.update(blob.toBuilder().contentType(CONTENT_TYPE).generation(42L).build(), + storage.update(blob.toBuilder().contentType(CONTENT_TYPE).generation(-1L).build(), Storage.BlobTargetOption.generationMatch()); fail("StorageException was expected"); } catch (StorageException ex) { @@ -187,7 +190,7 @@ public void testDeleteBlobFail() { BlobInfo blob = BlobInfo.of(bucket, blobName); assertNotNull(storage.create(blob)); try { - storage.delete(bucket, blob.name(), Storage.BlobSourceOption.generationMatch(42L)); + storage.delete(bucket, blob.name(), Storage.BlobSourceOption.generationMatch(-1L)); fail("StorageException was expected"); } catch (StorageException ex) { // expected @@ -232,8 +235,8 @@ public void testComposeBlobFail() { String targetBlobName = "test-compose-blob-fail-target"; BlobInfo targetBlob = BlobInfo.of(bucket, targetBlobName); Storage.ComposeRequest req = Storage.ComposeRequest.builder() - .addSource(sourceBlobName1, 42L) - .addSource(sourceBlobName2, 42L) + .addSource(sourceBlobName1, -1L) + .addSource(sourceBlobName2, -1L) .target(targetBlob) .build(); try { @@ -290,7 +293,7 @@ public void testCopyBlobFail() { Storage.CopyRequest req = new Storage.CopyRequest.Builder() .source(bucket, sourceBlobName) .target(BlobInfo.builder(bucket, targetBlobName).build()) - .sourceOptions(Storage.BlobSourceOption.metagenerationMatch(42L)) + .sourceOptions(Storage.BlobSourceOption.metagenerationMatch(-1L)) .build(); try { storage.copy(req); @@ -362,11 +365,11 @@ public void testBatchRequestFail() { String blobName = "test-batch-request-blob-fail"; BlobInfo blob = BlobInfo.of(bucket, blobName); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = blob.toBuilder().generation(42L).build(); + BlobInfo updatedBlob = blob.toBuilder().generation(-1L).build(); BatchRequest batchRequest = BatchRequest.builder() .update(updatedBlob, Storage.BlobTargetOption.generationMatch()) - .delete(bucket, blobName, Storage.BlobSourceOption.generationMatch(42L)) - .get(bucket, blobName, Storage.BlobSourceOption.generationMatch(42L)) + .delete(bucket, blobName, Storage.BlobSourceOption.generationMatch(-1L)) + .get(bucket, blobName, 
         .build();
     BatchResponse updateResponse = storage.apply(batchRequest);
     assertEquals(1, updateResponse.updates().size());
@@ -407,7 +410,7 @@ public void testReadChannelFail() throws UnsupportedEncodingException, IOExcepti
     BlobInfo blob = BlobInfo.of(bucket, blobName);
     assertNotNull(storage.create(blob));
     try (BlobReadChannel reader =
-        storage.reader(bucket, blobName, Storage.BlobSourceOption.metagenerationMatch(42L))) {
+        storage.reader(bucket, blobName, Storage.BlobSourceOption.metagenerationMatch(-1L))) {
       reader.read(ByteBuffer.allocate(42));
       fail("StorageException was expected");
     } catch (StorageException ex) {
@@ -419,7 +422,7 @@ public void testReadChannelFail() throws UnsupportedEncodingException, IOExcepti
   @Test
   public void testWriteChannelFail() throws UnsupportedEncodingException, IOException {
     String blobName = "test-write-channel-blob-fail";
-    BlobInfo blob = BlobInfo.builder(bucket, blobName).generation(42L).build();
+    BlobInfo blob = BlobInfo.builder(bucket, blobName).generation(-1L).build();
     try {
       try (BlobWriteChannel writer =
           storage.writer(blob, Storage.BlobTargetOption.generationMatch())) {
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java
index 8a6ee21ce931..2c79aaa78afe 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java
@@ -16,14 +16,23 @@
 
 package com.google.gcloud.storage;
 
+import com.google.common.collect.ImmutableMap;
 import com.google.gcloud.AuthCredentials;
+import com.google.gcloud.storage.RemoteGcsHelper.Option.KeyFromClasspath;
 
-import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.IOException;
 import java.io.InputStream;
+import java.io.IOException;
+import java.util.Map;
 import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
@@ -38,53 +47,188 @@ public class RemoteGcsHelper {
   private static final String PRIVATE_KEY_ENV_VAR = "GCLOUD_TESTS_KEY";
 
   private final StorageOptions options;
-  private final String bucket;
 
-  private RemoteGcsHelper(StorageOptions options, String bucket) {
+  private RemoteGcsHelper(StorageOptions options) {
     this.options = options;
-    this.bucket = bucket;
   }
 
+  /**
+   * Returns a {@code StorageOptions} object to be used for testing.
+   */
   public StorageOptions options() {
     return options;
   }
 
-  public String bucket() {
-    return bucket;
+  /**
+   * Delete a bucket recursively. Objects in the bucket are listed and deleted until bucket deletion
+   * succeeds or {@code timeout} expires.
+   *
+   * @param storage the storage service to be used to issue requests
+   * @param bucket the bucket to be deleted
+   * @param timeout the maximum time to wait
+   * @param unit the time unit of the timeout argument
+   * @return true if deletion succeeded, false if timeout expired.
+   * @throws InterruptedException if the thread deleting the bucket is interrupted while waiting
+   * @throws ExecutionException if an exception was thrown while deleting bucket or bucket objects
+   */
+  public static Boolean deleteBucketRecursively(Storage storage, String bucket, long timeout,
+      TimeUnit unit) throws InterruptedException, ExecutionException {
+    ExecutorService executor = Executors.newSingleThreadExecutor();
+    Future<Boolean> future = executor.submit(new DeleteBucketTask(storage, bucket));
+    try {
+      return future.get(timeout, unit);
+    } catch (TimeoutException ex) {
+      return false;
+    }
   }
-
-  private static String generateBucketName() {
+
+  /**
+   * Returns a bucket name generated using a random UUID.
+   */
+  public static String generateBucketName() {
     return BUCKET_NAME_PREFIX + UUID.randomUUID().toString();
   }
 
-  public static RemoteGcsHelper create() {
-    if (System.getenv(PROJECT_ID_ENV_VAR) == null || System.getenv(PRIVATE_KEY_ENV_VAR) == null) {
-      if (log.isLoggable(Level.WARNING)) {
-        log.log(Level.INFO, "Environment variables {0} and {1} not set", new String[] {
-            PROJECT_ID_ENV_VAR, PRIVATE_KEY_ENV_VAR});
-      }
-      return null;
-    }
+  /**
+   * Creates a {@code RemoteGcsHelper} object.
+   *
+   * @param options creation options
+   * @return A {@code RemoteGcsHelper} object for the provided options.
+   * @throws com.google.gcloud.storage.RemoteGcsHelper.GcsHelperException if environment variables
+   *     {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY_PATH} are not set or if the file
+   *     pointed by {@code GCLOUD_TESTS_KEY_PATH} does not exist
+   */
+  public static RemoteGcsHelper create(Option... options) throws GcsHelperException {
+    boolean keyFromClassPath = false;
+    Map<Class<? extends Option>, Option> optionsMap = Option.asImmutableMap(options);
+    if (optionsMap.containsKey(KeyFromClasspath.class)) {
+      keyFromClassPath =
+          ((KeyFromClasspath) optionsMap.get(KeyFromClasspath.class)).keyFromClasspath();
+    }
     String projectId = System.getenv(PROJECT_ID_ENV_VAR);
     String stringKeyPath = System.getenv(PRIVATE_KEY_ENV_VAR);
-    File keyFile = new File(stringKeyPath);
+    if (projectId == null) {
+      String message = "Environment variable " + PROJECT_ID_ENV_VAR + " not set";
+      if (log.isLoggable(Level.WARNING)) {
+        log.log(Level.WARNING, message);
+      }
+      throw new GcsHelperException(message);
+    }
+    if (stringKeyPath == null) {
+      String message = "Environment variable " + PRIVATE_KEY_ENV_VAR + " not set";
+      if (log.isLoggable(Level.WARNING)) {
+        log.log(Level.WARNING, message);
+      }
+      throw new GcsHelperException(message);
+    }
     try {
-      InputStream keyFileStream = new FileInputStream(keyFile);
-      StorageOptions options = StorageOptions.builder()
+      InputStream keyFileStream;
+      if (keyFromClassPath) {
+        keyFileStream = RemoteGcsHelper.class.getResourceAsStream(stringKeyPath);
+        if (keyFileStream == null) {
+          throw new FileNotFoundException(stringKeyPath + " not found in classpath");
+        }
+      } else {
+        keyFileStream = new FileInputStream(stringKeyPath);
+      }
+      StorageOptions storageOptions = StorageOptions.builder()
           .authCredentials(AuthCredentials.createForJson(keyFileStream))
           .projectId(projectId)
           .build();
-      return new RemoteGcsHelper(options, generateBucketName());
+      return new RemoteGcsHelper(storageOptions);
     } catch (FileNotFoundException ex) {
       if (log.isLoggable(Level.WARNING)) {
        log.log(Level.WARNING, ex.getMessage());
       }
-      return null;
+      throw GcsHelperException.translate(ex);
     } catch (IOException ex) {
       if (log.isLoggable(Level.WARNING)) {
         log.log(Level.WARNING, ex.getMessage());
       }
-      return null;
+      throw GcsHelperException.translate(ex);
+    }
+  }
+
+  private static class DeleteBucketTask implements Callable<Boolean> {
+
+    private Storage storage;
+    private String bucket;
+
+    public DeleteBucketTask(Storage storage, String bucket) {
+      this.storage = storage;
+      this.bucket = bucket;
+    }
+
+    @Override
+    public Boolean call() throws Exception {
+      while (true) {
+        for (BlobInfo info : storage.list(bucket)) {
+          storage.delete(bucket, info.name());
+        }
+        try {
+          storage.delete(bucket);
+          return true;
+        } catch (StorageException e) {
+          if (e.code() == 409) {
+            Thread.sleep(500);
+          } else {
+            throw e;
+          }
+        }
+      }
+    }
+  }
+
+  public static abstract class Option implements java.io.Serializable {
+
+    private static final long serialVersionUID = 8849118657896662369L;
+
+    public static final class KeyFromClasspath extends Option {
+
+      private static final long serialVersionUID = -5506049413185246821L;
+
+      private final boolean keyFromClasspath;
+
+      public KeyFromClasspath(boolean keyFromClasspath) {
+        this.keyFromClasspath = keyFromClasspath;
+      }
+
+      public boolean keyFromClasspath() {
+        return keyFromClasspath;
+      }
+    }
+
+    Option() {
+      // package protected
+    }
+
+    public static KeyFromClasspath keyFromClassPath() {
+      return new KeyFromClasspath(true);
+    }
+
+    static Map<Class<? extends Option>, Option> asImmutableMap(Option... options) {
+      ImmutableMap.Builder<Class<? extends Option>, Option> builder = ImmutableMap.builder();
+      for (Option option : options) {
+        builder.put(option.getClass(), option);
+      }
+      return builder.build();
+    }
+  }
+
+  public static class GcsHelperException extends RuntimeException {
+
+    private static final long serialVersionUID = -7756074894502258736L;
+
+    public GcsHelperException(String message) {
+      super(message);
+    }
+
+    public GcsHelperException(String message, Throwable cause) {
+      super(message, cause);
+    }
+
+    public static GcsHelperException translate(Exception ex) {
+      return new GcsHelperException(ex.getMessage(), ex);
+    }
+  }
 }
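Editorial note: none of the tests in this series exercise the new Option mechanism, so its purpose is easy to miss: it switches the GCLOUD_TESTS_KEY lookup from the filesystem to the classpath. A hedged sketch of how a caller could use it (the class name and the resource path in the comment are illustrative assumptions, not part of the patch):

    import com.google.gcloud.storage.RemoteGcsHelper;
    import com.google.gcloud.storage.Storage;
    import com.google.gcloud.storage.StorageFactory;

    public class ClasspathKeyExample {

      public static Storage storageFromClasspathKey() {
        // Assumes GCLOUD_TESTS_PROJECT_ID is set and GCLOUD_TESTS_KEY names a classpath
        // resource (for example "/gcloud-test-key.json") bundled with the test resources.
        RemoteGcsHelper gcsHelper =
            RemoteGcsHelper.create(RemoteGcsHelper.Option.keyFromClassPath());
        // As of this patch, create() throws GcsHelperException instead of returning null
        // when the environment variables are missing or the key cannot be read.
        return StorageFactory.instance().get(gcsHelper.options());
      }
    }

Because Option is an abstract base with a package-private constructor, additional flags can be added later without changing the create(Option...) signature.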
"text/plain"; private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD}; @@ -56,30 +58,21 @@ public class ITStorageTest { @BeforeClass public static void beforeClass() { - try { - gcsHelper = RemoteGcsHelper.create(); - storage = StorageFactory.instance().get(gcsHelper.options()); - storage.create(BucketInfo.of(bucket)); - } catch (RemoteGcsHelper.GcsHelperException e) { - // ignore - } + gcsHelper = RemoteGcsHelper.create(); + storage = StorageFactory.instance().get(gcsHelper.options()); + storage.create(BucketInfo.of(bucket)); } @AfterClass public static void afterClass() throws ExecutionException, TimeoutException, InterruptedException { - if (storage != null) { - if (!RemoteGcsHelper.deleteBucketRecursively(storage, bucket, 5, TimeUnit.SECONDS)) { - throw new RuntimeException("Bucket deletion timed out. Could not delete non-empty bucket"); + if (!RemoteGcsHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS)) { + if (log.isLoggable(Level.WARNING)) { + log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", bucket); } } } - @Before - public void beforeMethod() { - org.junit.Assume.assumeNotNull(storage); - } - @Test(timeout = 5000) public void testListBuckets() throws InterruptedException { Iterator bucketIterator = diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java index 2c79aaa78afe..f4c9b22a47b5 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelper.java @@ -60,8 +60,8 @@ public StorageOptions options() { } /** - * Delete a bucket recursively. Objects in the bucket are listed and deleted until bucket deletion - * succeeds or {@code timeout} expires. + * Deletes a bucket, even if non-empty. Objects in the bucket are listed and deleted until bucket + * deletion succeeds or {@code timeout} expires. * * @param storage the storage service to be used to issue requests * @param bucket the bucket to be deleted @@ -71,14 +71,16 @@ public StorageOptions options() { * @throws InterruptedException if the thread deleting the bucket is interrupted while waiting * @throws ExecutionException if an exception was thrown while deleting bucket or bucket objects */ - public static Boolean deleteBucketRecursively(Storage storage, String bucket, long timeout, - TimeUnit unit) throws InterruptedException, ExecutionException { + public static Boolean forceDelete(Storage storage, String bucket, long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException { ExecutorService executor = Executors.newSingleThreadExecutor(); Future future = executor.submit(new DeleteBucketTask(storage, bucket)); try { return future.get(timeout, unit); } catch (TimeoutException ex) { return false; + } finally { + executor.shutdown(); } } @@ -95,8 +97,8 @@ public static String generateBucketName() { * @param options creation options * @return A {@code RemoteGcsHelper} object for the provided options. 
From a1e3cc409e9235caf5e512423ff68709a39bf436 Mon Sep 17 00:00:00 2001
From: Marco Ziccardi
Date: Wed, 7 Oct 2015 13:07:33 +0200
Subject: [PATCH 5/5] Update travis config to run integration tests

- Add utilities/verify.sh script that avoids running IT tests on PRs
- Move key decryption to utilities/verify.sh (we need the API key)
- Call utilities/verify.sh from .travis.yml
---
 .travis.yml                |  3 ++-
 utilities/after_success.sh |  6 ------
 utilities/verify.sh        | 19 +++++++++++++++++++
 3 files changed, 21 insertions(+), 7 deletions(-)
 create mode 100755 utilities/verify.sh

diff --git a/.travis.yml b/.travis.yml
index 796ba78e08b7..ab421366db6a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,7 +8,8 @@ before_install:
 - git clone -b travis `git config --get remote.origin.url` target/travis
 - cp target/travis/settings.xml ~/.m2/settings.xml
 install: mvn install -DskipTests=true -Dgpg.skip=true
-script: mvn verify
+script:
+- utilities/verify.sh
 branches:
   only:
   - master
diff --git a/utilities/after_success.sh b/utilities/after_success.sh
index 600d214958f5..8752c071583d 100755
--- a/utilities/after_success.sh
+++ b/utilities/after_success.sh
@@ -11,12 +11,6 @@ if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" -a "${TRAVIS_BRANCH}" == "master" -
   SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)')
 
   if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then
-    # Get signing tools
-    openssl aes-256-cbc -K $encrypted_631490ecae8f_key -iv $encrypted_631490ecae8f_iv -in target/travis/signing-tools.tar.enc -out target/travis/signing-tools.tar -d
-    mkdir target/travis/signing-tools
-    chmod 700 target/travis/signing-tools
-    tar xvf target/travis/signing-tools.tar -C target/travis/signing-tools
-
     # Deploy site if not a SNAPSHOT
     git config --global user.name "travis-ci"
     git config --global user.email "travis@travis-ci.org"
diff --git a/utilities/verify.sh b/utilities/verify.sh
new file mode 100755
index 000000000000..47f8f1e7c0d4
--- /dev/null
+++ b/utilities/verify.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# This script is used by Travis-CI to run tests.
+# This script is referenced in .travis.yml.
+
+if [ "${TRAVIS_BRANCH}" == "master" -a "${TRAVIS_PULL_REQUEST}" == "false" ]; then
+  # Get signing tools and API keyfile
+  openssl aes-256-cbc -K $encrypted_631490ecae8f_key -iv $encrypted_631490ecae8f_iv -in target/travis/signing-tools.tar.enc -out target/travis/signing-tools.tar -d
+  mkdir target/travis/signing-tools
+  chmod 700 target/travis/signing-tools
+  tar xvf target/travis/signing-tools.tar -C target/travis/signing-tools
+  # Export test env variables
+  export GCLOUD_TESTS_PROJECT_ID="gcloud-devel"
+  export GCLOUD_TESTS_KEY=$TRAVIS_BUILD_DIR/target/travis/signing-tools/gcloud-devel-travis.json
+  # Run verify
+  mvn verify
+else
+  mvn verify -DskipITs
+fi