Skip to content

Commit

Permalink
fix: skip validation for server decompressed objects
Browse files Browse the repository at this point in the history
  • Loading branch information
jkwlui committed Feb 6, 2020
1 parent fcceece commit 85a6671
Show file tree
Hide file tree
Showing 3 changed files with 97 additions and 9 deletions.
20 changes: 17 additions & 3 deletions src/file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1212,6 +1212,7 @@ class File extends ServiceObject<File> {
// once Node 8 support is discontinued
const throughStream = streamEvents(through()) as Duplex;

let isServedCompressed = true;
let crc32c = true;
let md5 = false;

Expand Down Expand Up @@ -1294,7 +1295,7 @@ class File extends ServiceObject<File> {
// applicable, to the user.
const onResponse = (
err: Error | null,
body: ResponseBody,
_body: ResponseBody,
rawResponseStream: Metadata
) => {
if (err) {
Expand All @@ -1312,7 +1313,7 @@ class File extends ServiceObject<File> {
rawResponseStream.on('error', onComplete);

const headers = rawResponseStream.toJSON().headers;
const isCompressed = headers['content-encoding'] === 'gzip';
isServedCompressed = headers['content-encoding'] === 'gzip';
const shouldRunValidation = !rangeRequest && (crc32c || md5);
const throughStreams: Writable[] = [];

Expand All @@ -1321,7 +1322,7 @@ class File extends ServiceObject<File> {
throughStreams.push(validateStream);
}

if (isCompressed && options.decompress) {
if (isServedCompressed && options.decompress) {
throughStreams.push(zlib.createGunzip());
}

Expand Down Expand Up @@ -1369,6 +1370,17 @@ class File extends ServiceObject<File> {

onCompleteCalled = true;

// TODO: Remove once the backend issue is fixed.
// If object is stored compressed (having metadata.contentEncoding === 'gzip')
// and was served decompressed, then skip checksum validation because the
// remote checksum is computed against the compressed version of the object.
// See https://github.com/googleapis/nodejs-storage/issues/709.
if (this.metadata.contentEncoding === 'gzip' && !isServedCompressed) {
onCompleteCalled = true;
throughStream.end();
return;
}

const hashes = {
crc32c: this.metadata.crc32c,
md5: this.metadata.md5Hash,
Expand Down Expand Up @@ -1602,6 +1614,8 @@ class File extends ServiceObject<File> {
* supported for composite objects. An error will be raised if MD5 is
* specified but is not available. You may also choose to skip validation
* completely, however this is **not recommended**.
 * NOTE: Validation is automatically skipped for objects that were
 * uploaded using the `gzip` option and are served decompressed by the
 * server, because the stored checksums are computed against the
 * compressed object data.
*/
/**
* Create a writable stream to overwrite the contents of the file in your
Expand Down
27 changes: 27 additions & 0 deletions system-test/storage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2250,6 +2250,33 @@ describe('storage', () => {
});
});

it('should skip validation if file is served decompressed', done => {
  const filename = 'logo-gzipped.png';
  bucket
    .upload(FILES.logo.path, {
      destination: filename,
      gzip: true,
    })
    .then(() => {
      tmp.setGracefulCleanup();
      tmp.file((err, tmpFilePath) => {
        assert.ifError(err);

        const file = bucket.file(filename);
        file
          .createReadStream()
          // Attach 'response'/'error' BEFORE .pipe(): pipe() returns the
          // destination write stream, which never emits 'response', so
          // listeners chained after it would silently never fire.
          .on('response', (err, body, raw) => {
            // The server decompressed the object, so no content-encoding
            // header should be present on the raw response.
            assert.strictEqual(
              raw.toJSON().headers['content-encoding'],
              undefined
            );
          })
          .on('error', done)
          .pipe(fs.createWriteStream(tmpFilePath))
          .on('error', done)
          .on('finish', () => {
            // Clean up the remote test object; delete() reports via done.
            file.delete(done);
          });
      });
    })
    .catch(done);
});

describe('simple write', () => {
it('should save arbitrary data', done => {
const file = bucket.file('TestFile');
Expand Down
59 changes: 53 additions & 6 deletions test/file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1293,7 +1293,7 @@ describe('File', () => {

describe('validation', () => {
const data = 'test';
let fakeValidationStream: stream.Stream;
let fakeValidationStream: stream.Stream & {'test': Function};

beforeEach(() => {
file.metadata.mediaLink = 'http://uri';
Expand All @@ -1306,16 +1306,63 @@ describe('File', () => {
callback();
};

fakeValidationStream = through();
// tslint:disable-next-line no-any
(fakeValidationStream as any).test = () => {
return true;
};
fakeValidationStream = Object.assign(
through(),
{test: () => true},
);
hashStreamValidationOverride = () => {
return fakeValidationStream;
};
});

describe('server decompression', () => {
  beforeEach(() => {
    // Fake a raw response whose headers contain NO `content-encoding`,
    // i.e. the server served the object decompressed.
    handleRespOverride = (
      err: Error,
      res: {},
      body: {},
      callback: Function
    ) => {
      const rawResponseStream = through();
      Object.assign(rawResponseStream, {
        toJSON() {
          return {
            headers: {},
          };
        },
      });
      // Hand the fake stream to the read-stream machinery first, then end
      // it on the next tick so downstream listeners are attached before
      // 'end' fires.
      callback(null, null, rawResponseStream);
      setImmediate(() => {
        rawResponseStream.end(data);
      });
    };
    file.requestStream = getFakeSuccessfulRequest(data);
  });

  it('should skip validation if file was stored compressed', done => {
    // Spy on the validation stream's check so we can assert it never ran.
    const validateStub = sinon.stub().returns(true);
    fakeValidationStream.test = validateStub;

    // Metadata claims the stored object is gzip-encoded; combined with
    // the header-less response above (served decompressed), validation
    // should be skipped entirely.
    file.getMetadata = (options: {}, callback: Function) => {
      file.metadata = {
        crc32c: '####wA==',
        md5Hash: 'CY9rzUYh03PK3k6DJie09g==',
        contentEncoding: 'gzip',
      };
      callback();
    };

    file
      .createReadStream({validation: 'crc32c'})
      .on('error', done)
      .on('end', () => {
        // Validation was skipped: the stub was never invoked.
        assert(validateStub.notCalled);
        done();
      })
      .resume();
  });
});

it('should emit errors from the validation stream', done => {
const error = new Error('Error.');

Expand Down

0 comments on commit 85a6671

Please sign in to comment.