Skip to content

Commit

Permalink
fix: skip validation for server decompressed objects (#1063)
Browse files Browse the repository at this point in the history
* fix: skip validation for server decompressed objects

* fix: formatting changes to todo comment for tracking

* fix: lint

* fix: remove unnecessary var set

* use async/await

* listen to response evt on read stream

* npm run fix

Co-authored-by: Christopher Wilcox <crwilcox@google.com>
  • Loading branch information
jkwlui and crwilcox committed Feb 10, 2020
1 parent a019923 commit a0d9f4f
Show file tree
Hide file tree
Showing 3 changed files with 93 additions and 9 deletions.
19 changes: 16 additions & 3 deletions src/file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1149,6 +1149,7 @@ class File extends ServiceObject<File> {
// once Node 8 support is discontinued
const throughStream = streamEvents(through()) as Duplex;

let isServedCompressed = true;
let crc32c = true;
let md5 = false;

Expand Down Expand Up @@ -1231,7 +1232,7 @@ class File extends ServiceObject<File> {
// applicable, to the user.
const onResponse = (
err: Error | null,
body: ResponseBody,
_body: ResponseBody,
rawResponseStream: Metadata
) => {
if (err) {
Expand All @@ -1249,7 +1250,7 @@ class File extends ServiceObject<File> {
rawResponseStream.on('error', onComplete);

const headers = rawResponseStream.toJSON().headers;
const isCompressed = headers['content-encoding'] === 'gzip';
isServedCompressed = headers['content-encoding'] === 'gzip';
const shouldRunValidation = !rangeRequest && (crc32c || md5);
const throughStreams: Writable[] = [];

Expand All @@ -1258,7 +1259,7 @@ class File extends ServiceObject<File> {
throughStreams.push(validateStream);
}

if (isCompressed && options.decompress) {
if (isServedCompressed && options.decompress) {
throughStreams.push(zlib.createGunzip());
}

Expand Down Expand Up @@ -1306,6 +1307,16 @@ class File extends ServiceObject<File> {

onCompleteCalled = true;

// TODO(https://github.com/googleapis/nodejs-storage/issues/709):
// Remove once the backend issue is fixed.
// If the object is stored compressed (metadata.contentEncoding === 'gzip')
// but was served decompressed, then skip checksum validation because the
// remote checksum is computed against the compressed version of the object.
if (this.metadata.contentEncoding === 'gzip' && !isServedCompressed) {
throughStream.end();
return;
}

const hashes = {
crc32c: this.metadata.crc32c,
md5: this.metadata.md5Hash,
Expand Down Expand Up @@ -1539,6 +1550,8 @@ class File extends ServiceObject<File> {
* supported for composite objects. An error will be raised if MD5 is
* specified but is not available. You may also choose to skip validation
* completely, however this is **not recommended**.
* NOTE: Validation is automatically skipped for objects that were
* uploaded using the `gzip` option and are served decompressed by the server.
*/
/**
* Create a writable stream to overwrite the contents of the file in your
Expand Down
27 changes: 27 additions & 0 deletions system-test/storage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2250,6 +2250,33 @@ describe('storage', () => {
});
});

// Objects uploaded with `gzip: true` are stored compressed; the server
// serves them decompressed by default, so the download path must skip
// checksum validation (the stored checksums cover the compressed bytes).
it('should skip validation if file is served decompressed', async () => {
  const filename = 'logo-gzipped.png';
  await bucket.upload(FILES.logo.path, {destination: filename, gzip: true});

  tmp.setGracefulCleanup();
  const {name: tmpFilePath} = tmp.fileSync();

  const file = bucket.file(filename);

  // Capture the header inside the event handler and assert only after the
  // stream settles: an assertion throw inside an event listener surfaces
  // as an uncaught exception instead of rejecting this promise.
  let contentEncoding: string | undefined;
  await new Promise((resolve, reject) => {
    file
      .createReadStream()
      .on('error', reject)
      .on('response', raw => {
        contentEncoding = raw.toJSON().headers['content-encoding'];
      })
      .pipe(fs.createWriteStream(tmpFilePath))
      .on('error', reject)
      .on('finish', resolve);
  });

  // The server should have decompressed the object before serving it.
  assert.strictEqual(contentEncoding, undefined);

  await file.delete();
});

describe('simple write', () => {
it('should save arbitrary data', done => {
const file = bucket.file('TestFile');
Expand Down
56 changes: 50 additions & 6 deletions test/file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1290,7 +1290,7 @@ describe('File', () => {

describe('validation', () => {
const data = 'test';
let fakeValidationStream: stream.Stream;
let fakeValidationStream: stream.Stream & {test: Function};

beforeEach(() => {
file.metadata.mediaLink = 'http://uri';
Expand All @@ -1303,16 +1303,60 @@ describe('File', () => {
callback();
};

fakeValidationStream = through();
// tslint:disable-next-line no-any
(fakeValidationStream as any).test = () => {
return true;
};
fakeValidationStream = Object.assign(through(), {test: () => true});
hashStreamValidationOverride = () => {
return fakeValidationStream;
};
});

describe('server decompression', () => {
  beforeEach(() => {
    // Simulate a response whose headers carry no `content-encoding`,
    // i.e. the server served the object decompressed.
    handleRespOverride = (
      err: Error,
      res: {},
      body: {},
      callback: Function
    ) => {
      const rawResponseStream = through();
      Object.assign(rawResponseStream, {
        toJSON() {
          return {headers: {}};
        },
      });
      callback(null, null, rawResponseStream);
      setImmediate(() => rawResponseStream.end(data));
    };
    file.requestStream = getFakeSuccessfulRequest(data);
  });

  it('should skip validation if file was stored compressed', done => {
    const validateSpy = sinon.stub().returns(true);
    fakeValidationStream.test = validateSpy;

    // Metadata reports the object as stored gzipped, while the mocked
    // response above was served without `content-encoding: gzip`.
    file.getMetadata = (options: {}, callback: Function) => {
      file.metadata = {
        crc32c: '####wA==',
        md5Hash: 'CY9rzUYh03PK3k6DJie09g==',
        contentEncoding: 'gzip',
      };
      callback();
    };

    file
      .createReadStream({validation: 'crc32c'})
      .on('error', done)
      .on('end', () => {
        // Validation must not have run for this download.
        assert(validateSpy.notCalled);
        done();
      })
      .resume();
  });
});

it('should emit errors from the validation stream', done => {
const error = new Error('Error.');

Expand Down

0 comments on commit a0d9f4f

Please sign in to comment.