fix: skip validation for server decompressed objects #1063

Merged: 8 commits, Feb 6, 2020
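For context, a minimal sketch of the scenario this fix addresses (the bucket name and file paths below are hypothetical): an object uploaded with `gzip: true` is stored compressed, so its stored crc32c/md5 metadata describes the compressed bytes; when the backend serves the object decompressed, checksums computed over the downloaded bytes cannot match, and such downloads previously failed validation.

```ts
// Hypothetical bucket/file names; a sketch of the affected download path.
import {Storage} from '@google-cloud/storage';
import * as fs from 'fs';

async function main() {
  const storage = new Storage();
  const bucket = storage.bucket('my-bucket');

  // Stored compressed: metadata.contentEncoding === 'gzip', and the stored
  // checksums cover the compressed payload.
  await bucket.upload('logo.png', {destination: 'logo.png', gzip: true});

  // If the backend serves the object decompressed (no `content-encoding:
  // gzip` response header), validating the downloaded bytes against the
  // stored checksums cannot succeed; with this change validation is skipped
  // instead of surfacing a checksum-mismatch error.
  await new Promise<void>((resolve, reject) => {
    bucket
      .file('logo.png')
      .createReadStream({validation: 'crc32c'})
      .on('error', reject)
      .pipe(fs.createWriteStream('/tmp/logo.png'))
      .on('error', reject)
      .on('finish', () => resolve());
  });
}

main().catch(console.error);
```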
19 changes: 16 additions & 3 deletions src/file.ts
@@ -1212,6 +1212,7 @@ class File extends ServiceObject<File> {
// once Node 8 support is discontinued
const throughStream = streamEvents(through()) as Duplex;

let isServedCompressed = true;
let crc32c = true;
let md5 = false;

@@ -1294,7 +1295,7 @@
// applicable, to the user.
const onResponse = (
err: Error | null,
body: ResponseBody,
_body: ResponseBody,
rawResponseStream: Metadata
) => {
if (err) {
@@ -1312,7 +1313,7 @@
rawResponseStream.on('error', onComplete);

const headers = rawResponseStream.toJSON().headers;
const isCompressed = headers['content-encoding'] === 'gzip';
isServedCompressed = headers['content-encoding'] === 'gzip';
const shouldRunValidation = !rangeRequest && (crc32c || md5);
const throughStreams: Writable[] = [];

@@ -1321,7 +1322,7 @@
throughStreams.push(validateStream);
}

if (isCompressed && options.decompress) {
if (isServedCompressed && options.decompress) {
throughStreams.push(zlib.createGunzip());
}

@@ -1369,6 +1370,16 @@

onCompleteCalled = true;

// TODO(https://github.com/googleapis/nodejs-storage/issues/709):
// Remove once the backend issue is fixed.
// If the object is stored compressed (metadata.contentEncoding === 'gzip')
// but was served decompressed, skip checksum validation: the remote
// checksums are computed against the compressed version of the object.
if (this.metadata.contentEncoding === 'gzip' && !isServedCompressed) {
throughStream.end();
return;
}

const hashes = {
crc32c: this.metadata.crc32c,
md5: this.metadata.md5Hash,
@@ -1602,6 +1613,8 @@ class File extends ServiceObject<File> {
* supported for composite objects. An error will be raised if MD5 is
* specified but is not available. You may also choose to skip validation
* completely, however this is **not recommended**.
* NOTE: Validation is automatically skipped for objects that were uploaded
* using the `gzip` option and are served decompressed, since their stored
* checksums cover the compressed data.
*/
/**
* Create a writable stream to overwrite the contents of the file in your
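The check added above can be read as a small predicate; the following is an illustrative paraphrase, not code from the library (the function and parameter names are made up):

```ts
// Illustrative paraphrase of the new skip-validation check; not library code.
// `contentEncoding` is the object's stored metadata value, `responseHeaders`
// comes from the raw download response.
function shouldSkipValidation(
  contentEncoding: string | undefined,
  responseHeaders: {[name: string]: string | undefined}
): boolean {
  const storedCompressed = contentEncoding === 'gzip';
  const servedCompressed = responseHeaders['content-encoding'] === 'gzip';
  // The stored crc32c/md5 values describe the compressed payload, so they
  // can only be verified when the payload arrives still compressed.
  return storedCompressed && !servedCompressed;
}
```

In the diff above, when this condition holds the read stream simply ends without comparing hashes.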
27 changes: 27 additions & 0 deletions system-test/storage.ts
@@ -2250,6 +2250,33 @@ describe('storage', () => {
});
});

it('should skip validation if file is served decompressed', async () => {
const filename = 'logo-gzipped.png';
await bucket.upload(FILES.logo.path, {destination: filename, gzip: true});

tmp.setGracefulCleanup();
const {name: tmpFilePath} = tmp.fileSync();

const file = bucket.file(filename);

await new Promise((resolve, reject) => {
file
.createReadStream()
.on('error', reject)
.on('response', raw => {
assert.strictEqual(
raw.toJSON().headers['content-encoding'],
undefined
);
})
.pipe(fs.createWriteStream(tmpFilePath))
.on('error', reject)
.on('finish', resolve);
});

await file.delete();
});

describe('simple write', () => {
it('should save arbitrary data', done => {
const file = bucket.file('TestFile');
56 changes: 50 additions & 6 deletions test/file.ts
@@ -1293,7 +1293,7 @@ describe('File', () => {

describe('validation', () => {
const data = 'test';
let fakeValidationStream: stream.Stream;
let fakeValidationStream: stream.Stream & {test: Function};

beforeEach(() => {
file.metadata.mediaLink = 'http://uri';
@@ -1306,16 +1306,60 @@
callback();
};

fakeValidationStream = through();
// tslint:disable-next-line no-any
(fakeValidationStream as any).test = () => {
return true;
};
fakeValidationStream = Object.assign(through(), {test: () => true});
hashStreamValidationOverride = () => {
return fakeValidationStream;
};
});

describe('server decompression', () => {
beforeEach(() => {
handleRespOverride = (
err: Error,
res: {},
body: {},
callback: Function
) => {
const rawResponseStream = through();
Object.assign(rawResponseStream, {
toJSON() {
return {
headers: {},
};
},
});
callback(null, null, rawResponseStream);
setImmediate(() => {
rawResponseStream.end(data);
});
};
file.requestStream = getFakeSuccessfulRequest(data);
});

it('should skip validation if file was stored compressed', done => {
const validateStub = sinon.stub().returns(true);
fakeValidationStream.test = validateStub;

file.getMetadata = (options: {}, callback: Function) => {
file.metadata = {
crc32c: '####wA==',
md5Hash: 'CY9rzUYh03PK3k6DJie09g==',
contentEncoding: 'gzip',
};
callback();
};

file
.createReadStream({validation: 'crc32c'})
.on('error', done)
.on('end', () => {
assert(validateStub.notCalled);
done();
})
.resume();
});
});

it('should emit errors from the validation stream', done => {
const error = new Error('Error.');
