From c4a2aed137170708c4ab79db30ed5485cb16a5a6 Mon Sep 17 00:00:00 2001 From: Rich Hodgkins Date: Sat, 28 Mar 2020 12:08:57 +0000 Subject: [PATCH 1/3] test(bucket): failing test for #1133 --- test/bucket.ts | 81 +++++++++++++++++++++++----- test/testdata/dummylargetestfile.txt | 3 ++ 2 files changed, 71 insertions(+), 13 deletions(-) create mode 100644 test/testdata/dummylargetestfile.txt diff --git a/test/bucket.ts b/test/bucket.ts index ce630ddda..be5b0135a 100644 --- a/test/bucket.ts +++ b/test/bucket.ts @@ -2381,19 +2381,74 @@ describe('Bucket', () => { bucket.upload(textFilepath, options, assert.ifError); }); - it('should force a resumable upload', done => { - const fakeFile = new FakeFile(bucket, 'file-name'); - const options = {destination: fakeFile, resumable: true}; - fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { - const ws = new stream.Writable(); - ws.write = () => true; - setImmediate(() => { - assert.strictEqual(options_.resumable, options.resumable); - done(); - }); - return ws; - }; - bucket.upload(filepath, options, assert.ifError); + describe('resumable uploads', () => { + const dummyLargeFilepath = path.join( + __dirname, + '../../test/testdata', + 'dummylargetestfile.txt' + ); + const dummyLargeFileStat = require('fs').statSync(dummyLargeFilepath); + // Set size greater than threshold + dummyLargeFileStat.size = 5000001; + + let sandbox: sinon.SinonSandbox; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + + const statStub = sandbox.stub(require('fs'), 'stat'); + statStub + .withArgs(dummyLargeFilepath) + .callsArgWithAsync(1, null, dummyLargeFileStat); + statStub.callThrough(); + }); + + afterEach(() => sandbox.restore()); + + it('should force a resumable upload', done => { + const fakeFile = new FakeFile(bucket, 'file-name'); + const options = {destination: fakeFile, resumable: true}; + fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { + const ws = new stream.Writable(); + ws.write = () => 
true; + setImmediate(() => { + assert.strictEqual(options_.resumable, options.resumable); + done(); + }); + return ws; + }; + bucket.upload(filepath, options, assert.ifError); + }); + + it('should not pass resumable option to createWriteStream when file size is greater than minimum resumable threshold', done => { + const fakeFile = new FakeFile(bucket, 'file-name'); + const options = {destination: fakeFile}; + fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { + const ws = new stream.Writable(); + ws.write = () => true; + setImmediate(() => { + assert.ok(!('resumable' in options_)); + done(); + }); + return ws; + }; + bucket.upload(dummyLargeFilepath, options, assert.ifError); + }); + + it('should prevent resumable when file size is less than minimum resumable threshold', done => { + const fakeFile = new FakeFile(bucket, 'file-name'); + const options = {destination: fakeFile}; + fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { + const ws = new stream.Writable(); + ws.write = () => true; + setImmediate(() => { + assert.strictEqual(options_.resumable, false); + done(); + }); + return ws; + }; + bucket.upload(filepath, options, assert.ifError); + }); }); it('should allow overriding content type', done => { diff --git a/test/testdata/dummylargetestfile.txt b/test/testdata/dummylargetestfile.txt new file mode 100644 index 000000000..c4d462324 --- /dev/null +++ b/test/testdata/dummylargetestfile.txt @@ -0,0 +1,3 @@ +This is a test file! 
+ +When using fs.stat should be stubbed to return a large file size From 71b232134e97fe08447dc7927fdea61a87f8d61d Mon Sep 17 00:00:00 2001 From: Rich Hodgkins Date: Sat, 28 Mar 2020 12:18:11 +0000 Subject: [PATCH 2/3] fix(bucket): only disable resumable uploads for bucket.upload when inspecting the file size (fixes #1133) --- src/bucket.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/bucket.ts b/src/bucket.ts index 103e888cf..822d0ae0c 100644 --- a/src/bucket.ts +++ b/src/bucket.ts @@ -3454,7 +3454,10 @@ class Bucket extends ServiceObject { return; } - options.resumable = fd.size > RESUMABLE_THRESHOLD; + if (fd.size <= RESUMABLE_THRESHOLD) { + // Only disable resumable uploads so createWriteStream still attempts them and falls back to simple upload. + options.resumable = false; + } upload(); }); From 0e5762161c4d0e396ac73acfb235b7722b76ae45 Mon Sep 17 00:00:00 2001 From: Rich Hodgkins Date: Wed, 29 Apr 2020 10:17:08 +0100 Subject: [PATCH 3/3] test(bucket): Applied patch from @stephenplusplus - see https://github.com/googleapis/nodejs-storage/pull/1135#issuecomment-620070038 --- test/bucket.ts | 42 +++++++++++++--------------- test/testdata/dummylargetestfile.txt | 3 -- 2 files changed, 20 insertions(+), 25 deletions(-) delete mode 100644 test/testdata/dummylargetestfile.txt diff --git a/test/bucket.ts b/test/bucket.ts index be5b0135a..51e653beb 100644 --- a/test/bucket.ts +++ b/test/bucket.ts @@ -21,6 +21,8 @@ import { } from '@google-cloud/common'; import arrify = require('arrify'); import * as assert from 'assert'; +import * as extend from 'extend'; +import * as fs from 'fs'; import {describe, it, before, beforeEach, after, afterEach} from 'mocha'; import * as mime from 'mime-types'; import pLimit from 'p-limit'; @@ -89,6 +91,13 @@ class FakeNotification { } } +let fsStatOverride: Function | null; +const fakeFs = extend(true, {}, fs, { + stat: (filePath: string, callback: Function) => { + return (fsStatOverride || fs.stat)(filePath, 
callback); + }, +}); + let pLimitOverride: Function | null; const fakePLimit = (limit: number) => (pLimitOverride || pLimit)(limit); @@ -172,6 +181,7 @@ describe('Bucket', () => { before(() => { Bucket = proxyquire('../src/bucket.js', { + fs: fakeFs, 'p-limit': {default: fakePLimit}, '@google-cloud/promisify': fakePromisify, '@google-cloud/paginator': fakePaginator, @@ -188,6 +198,7 @@ describe('Bucket', () => { }); beforeEach(() => { + fsStatOverride = null; pLimitOverride = null; bucket = new Bucket(STORAGE, BUCKET_NAME); }); @@ -2382,29 +2393,12 @@ describe('Bucket', () => { }); describe('resumable uploads', () => { - const dummyLargeFilepath = path.join( - __dirname, - '../../test/testdata', - 'dummylargetestfile.txt' - ); - const dummyLargeFileStat = require('fs').statSync(dummyLargeFilepath); - // Set size greater than threshold - dummyLargeFileStat.size = 5000001; - - let sandbox: sinon.SinonSandbox; - beforeEach(() => { - sandbox = sinon.createSandbox(); - - const statStub = sandbox.stub(require('fs'), 'stat'); - statStub - .withArgs(dummyLargeFilepath) - .callsArgWithAsync(1, null, dummyLargeFileStat); - statStub.callThrough(); + fsStatOverride = (path: string, callback: Function) => { + callback(null, {size: 1}); // Small size to guarantee simple upload + }; }); - afterEach(() => sandbox.restore()); - it('should force a resumable upload', done => { const fakeFile = new FakeFile(bucket, 'file-name'); const options = {destination: fakeFile, resumable: true}; @@ -2423,16 +2417,20 @@ describe('Bucket', () => { it('should not pass resumable option to createWriteStream when file size is greater than minimum resumable threshold', done => { const fakeFile = new FakeFile(bucket, 'file-name'); const options = {destination: fakeFile}; + fsStatOverride = (path: string, callback: Function) => { + // Set size greater than threshold + callback(null, {size: 5000001}); + }; fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { const ws = new 
stream.Writable(); ws.write = () => true; setImmediate(() => { - assert.ok(!('resumable' in options_)); + assert.strictEqual(typeof options_.resumable, 'undefined'); done(); }); return ws; }; - bucket.upload(dummyLargeFilepath, options, assert.ifError); + bucket.upload(filepath, options, assert.ifError); }); it('should prevent resumable when file size is less than minimum resumable threshold', done => { diff --git a/test/testdata/dummylargetestfile.txt b/test/testdata/dummylargetestfile.txt deleted file mode 100644 index c4d462324..000000000 --- a/test/testdata/dummylargetestfile.txt +++ /dev/null @@ -1,3 +0,0 @@ -This is a test file! - -When using fs.stat should be stubbed to return a large file size