diff --git a/src/file.ts b/src/file.ts
index 56db4a542..71ad657d9 100644
--- a/src/file.ts
+++ b/src/file.ts
@@ -1212,6 +1212,7 @@ class File extends ServiceObject {
     // once Node 8 support is discontinued
     const throughStream = streamEvents(through()) as Duplex;
 
+    let isServedCompressed = true;
     let crc32c = true;
     let md5 = false;
 
@@ -1294,7 +1295,7 @@ class File extends ServiceObject {
     // applicable, to the user.
     const onResponse = (
       err: Error | null,
-      body: ResponseBody,
+      _body: ResponseBody,
       rawResponseStream: Metadata
     ) => {
       if (err) {
@@ -1312,7 +1313,7 @@ class File extends ServiceObject {
       rawResponseStream.on('error', onComplete);
 
       const headers = rawResponseStream.toJSON().headers;
-      const isCompressed = headers['content-encoding'] === 'gzip';
+      isServedCompressed = headers['content-encoding'] === 'gzip';
       const shouldRunValidation = !rangeRequest && (crc32c || md5);
       const throughStreams: Writable[] = [];
 
@@ -1321,7 +1322,7 @@ class File extends ServiceObject {
         throughStreams.push(validateStream);
       }
 
-      if (isCompressed && options.decompress) {
+      if (isServedCompressed && options.decompress) {
         throughStreams.push(zlib.createGunzip());
       }
 
@@ -1369,6 +1370,16 @@ class File extends ServiceObject {
 
       onCompleteCalled = true;
 
+      // TODO(https://github.com/googleapis/nodejs-storage/issues/709):
+      // Remove once the backend issue is fixed.
+      // If object is stored compressed (having metadata.contentEncoding === 'gzip')
+      // and was served decompressed, then skip checksum validation because the
+      // remote checksum is computed against the compressed version of the object.
+      if (this.metadata.contentEncoding === 'gzip' && !isServedCompressed) {
+        throughStream.end();
+        return;
+      }
+
       const hashes = {
         crc32c: this.metadata.crc32c,
         md5: this.metadata.md5Hash,
@@ -1602,6 +1613,8 @@ class File extends ServiceObject {
    * supported for composite objects. An error will be raised if MD5 is
    * specified but is not available. You may also choose to skip validation
    * completely, however this is **not recommended**.
+   * NOTE: Validation is automatically skipped for objects that were
+   * uploaded using the `gzip` option and have already compressed content.
    */
   /**
    * Create a writable stream to overwrite the contents of the file in your
diff --git a/system-test/storage.ts b/system-test/storage.ts
index 374a870cb..819f1c97e 100644
--- a/system-test/storage.ts
+++ b/system-test/storage.ts
@@ -2250,6 +2250,33 @@ describe('storage', () => {
       });
     });
 
+    it('should skip validation if file is served decompressed', async () => {
+      const filename = 'logo-gzipped.png';
+      await bucket.upload(FILES.logo.path, {destination: filename, gzip: true});
+
+      tmp.setGracefulCleanup();
+      const {name: tmpFilePath} = tmp.fileSync();
+
+      const file = bucket.file(filename);
+
+      await new Promise((resolve, reject) => {
+        file
+          .createReadStream()
+          .on('error', reject)
+          .on('response', raw => {
+            assert.strictEqual(
+              raw.toJSON().headers['content-encoding'],
+              undefined
+            );
+          })
+          .pipe(fs.createWriteStream(tmpFilePath))
+          .on('error', reject)
+          .on('finish', resolve);
+      });
+
+      await file.delete();
+    });
+
     describe('simple write', () => {
       it('should save arbitrary data', done => {
         const file = bucket.file('TestFile');
diff --git a/test/file.ts b/test/file.ts
index f9df3a7c0..832ab4ef1 100644
--- a/test/file.ts
+++ b/test/file.ts
@@ -1293,7 +1293,7 @@ describe('File', () => {
 
     describe('validation', () => {
       const data = 'test';
-      let fakeValidationStream: stream.Stream;
+      let fakeValidationStream: stream.Stream & {test: Function};
 
       beforeEach(() => {
         file.metadata.mediaLink = 'http://uri';
@@ -1306,16 +1306,60 @@ describe('File', () => {
           callback();
         };
 
-        fakeValidationStream = through();
-        // tslint:disable-next-line no-any
-        (fakeValidationStream as any).test = () => {
-          return true;
-        };
+        fakeValidationStream = Object.assign(through(), {test: () => true});
         hashStreamValidationOverride = () => {
           return fakeValidationStream;
         };
       });
 
+      describe('server decompression', () => {
+        beforeEach(() => {
+          handleRespOverride = (
+            err: Error,
+            res: {},
+            body: {},
+            callback: Function
+          ) => {
+            const rawResponseStream = through();
+            Object.assign(rawResponseStream, {
+              toJSON() {
+                return {
+                  headers: {},
+                };
+              },
+            });
+            callback(null, null, rawResponseStream);
+            setImmediate(() => {
+              rawResponseStream.end(data);
+            });
+          };
+          file.requestStream = getFakeSuccessfulRequest(data);
+        });
+
+        it('should skip validation if file was stored compressed', done => {
+          const validateStub = sinon.stub().returns(true);
+          fakeValidationStream.test = validateStub;
+
+          file.getMetadata = (options: {}, callback: Function) => {
+            file.metadata = {
+              crc32c: '####wA==',
+              md5Hash: 'CY9rzUYh03PK3k6DJie09g==',
+              contentEncoding: 'gzip',
+            };
+            callback();
+          };
+
+          file
+            .createReadStream({validation: 'crc32c'})
+            .on('error', done)
+            .on('end', () => {
+              assert(validateStub.notCalled);
+              done();
+            })
+            .resume();
+        });
+      });
+
       it('should emit errors from the validation stream', done => {
         const error = new Error('Error.');
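
For context, here is a minimal sketch of the end-to-end scenario this patch addresses, using only the public `@google-cloud/storage` API already exercised by the system test above; the bucket name and local paths are illustrative, not part of the patch. An object uploaded with `gzip: true` is stored compressed, so its recorded CRC32C/MD5 describe the compressed bytes; when the server hands it back decompressed, `createReadStream()` now skips checksum validation instead of surfacing a spurious mismatch.

```ts
import {Storage} from '@google-cloud/storage';
import * as fs from 'fs';

// Illustrative names; any bucket you own and any local file will do.
const storage = new Storage();
const bucket = storage.bucket('my-bucket');

async function roundTripGzipUpload(): Promise<void> {
  // Upload compresses the payload and records contentEncoding: 'gzip' on the object.
  await bucket.upload('logo.png', {destination: 'logo-gzipped.png', gzip: true});

  // The service may decompress the object when serving it. The stored checksums
  // refer to the compressed bytes, so with this fix the read stream skips
  // validation rather than reporting a false mismatch.
  await new Promise<void>((resolve, reject) => {
    bucket
      .file('logo-gzipped.png')
      .createReadStream() // checksum validation is on by default
      .on('error', reject)
      .pipe(fs.createWriteStream('/tmp/logo.png'))
      .on('error', reject)
      .on('finish', resolve);
  });
}

roundTripGzipUpload().catch(console.error);
```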