fs: support special files in promises.readFile #21497

Closed
wants to merge 3 commits

17 changes: 9 additions & 8 deletions lib/internal/fs/promises.js
@@ -125,6 +125,10 @@ async function writeFileHandle(filehandle, data, options) {
   } while (remaining > 0);
 }
 
+// Note: This is different from kReadFileBufferLength used for non-promisified
+// fs.readFile.
+const kReadFileMaxChunkSize = 16384;
+
Member:
Out of curiosity, is there context for this?

Member Author:
Not really, it was there when I found it, so I kept it that way. I just thought it was interesting to note.

Member Author:
I guess we'd have to ask @jasnell about it.


 async function readFileHandle(filehandle, options) {
   const statFields = await binding.fstat(filehandle.fd, false, kUsePromises);
 
@@ -135,22 +139,19 @@ async function readFileHandle(filehandle, options) {
     size = 0;
   }
 
-  if (size === 0)
-    return options.encoding ? '' : Buffer.alloc(0);
-
   if (size > kMaxLength)
     throw new ERR_FS_FILE_TOO_LARGE(size);
 
   const chunks = [];
-  const chunkSize = Math.min(size, 16384);
-  let totalRead = 0;
+  const chunkSize = size === 0 ?
+    kReadFileMaxChunkSize :
+    Math.min(size, kReadFileMaxChunkSize);
   let endOfFile = false;
   do {
     const buf = Buffer.alloc(chunkSize);
     const { bytesRead, buffer } =
-      await read(filehandle, buf, 0, chunkSize, totalRead);
-    totalRead += bytesRead;
-    endOfFile = bytesRead !== chunkSize;
+      await read(filehandle, buf, 0, chunkSize, -1);
+    endOfFile = bytesRead === 0;
     if (bytesRead > 0)
       chunks.push(buffer.slice(0, bytesRead));
   } while (!endOfFile);
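
The behavioral change above: rather than trusting the stat-reported size (which is 0 for special files such as those under /proc) and reading at explicit offsets, the loop now reads fixed-size chunks from the current file position until a read returns 0 bytes, which signals end of file. Below is a minimal standalone sketch of the same idea written against the public fs.promises API; the helper name, default chunk size, and error handling are illustrative and not part of the patch.

'use strict';
const fsPromises = require('fs').promises;

// Read a whole file without trusting the size reported by stat, by looping
// until a read returns zero bytes.
async function readWholeFile(path, chunkSize = 16384) {
  const handle = await fsPromises.open(path, 'r');
  const chunks = [];
  try {
    while (true) {
      const buf = Buffer.alloc(chunkSize);
      // position = null reads from the current file position, much like the
      // -1 passed to the internal read() in the patch above.
      const { bytesRead } = await handle.read(buf, 0, chunkSize, null);
      if (bytesRead === 0)  // a zero-byte read means end of file
        break;
      chunks.push(buf.slice(0, bytesRead));
    }
  } finally {
    await handle.close();
  }
  return Buffer.concat(chunks);
}

// Works for regular files as well as procfs files whose stat size is 0 (Linux):
readWholeFile('/proc/sys/kernel/hostname')
  .then((buf) => console.log(buf.toString().trim()))
  .catch((err) => console.error(err));

Reading from the current position rather than an accumulated offset also avoids assuming the file is seekable, which matters for some of the special files this change targets.
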
17 changes: 17 additions & 0 deletions test/parallel/test-fs-promises-file-handle-readFile.js
@@ -28,5 +28,22 @@ async function validateReadFile() {
   assert.deepStrictEqual(buffer, readFileData);
 }
 
+async function validateReadFileProc() {
+  // Test to make sure reading a file under the /proc directory works. Adapted
+  // from test-fs-read-file-sync-hostname.js.
+  // Refs:
+  // - https://groups.google.com/forum/#!topic/nodejs-dev/rxZ_RoH1Gn0
+  // - https://github.com/nodejs/node/issues/21331
+
+  // Test is Linux-specific.
+  if (!common.isLinux)
+    return;
+
+  const fileHandle = await open('/proc/sys/kernel/hostname', 'r');
+  const hostname = await fileHandle.readFile();
+  assert.ok(hostname.length > 0);
+}
+
 validateReadFile()
+  .then(() => validateReadFileProc())
   .then(common.mustCall());
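
Background on why the /proc tests matter (an aside, not part of the diff): on Linux, procfs entries typically report a size of 0 from stat even though they have content, so any read path that sizes its buffer from stat sees an empty file. A quick way to observe this on a Linux machine, using only stable fs APIs:

'use strict';
const fs = require('fs');

// procfs entries usually report st_size === 0 even though they have content.
console.log(fs.statSync('/proc/sys/kernel/hostname').size);  // typically 0

// The sync API already reads such files by looping until end of file, so this
// prints the machine's hostname rather than an empty string.
console.log(fs.readFileSync('/proc/sys/kernel/hostname', 'utf8').trim());
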
44 changes: 31 additions & 13 deletions test/parallel/test-fs-promises-readfile.js
@@ -12,17 +12,35 @@ const fn = path.join(tmpdir.path, 'large-file');

 common.crashOnUnhandledRejection();
 
-// Creating large buffer with random content
-const buffer = Buffer.from(
-  Array.apply(null, { length: 16834 * 2 })
-    .map(Math.random)
-    .map((number) => (number * (1 << 8)))
-);
-
-// Writing buffer to a file then try to read it
-writeFile(fn, buffer)
-  .then(() => readFile(fn))
-  .then((readBuffer) => {
-    assert.strictEqual(readBuffer.equals(buffer), true);
-  })
+async function validateReadFile() {
+  // Creating large buffer with random content
+  const buffer = Buffer.from(
+    Array.apply(null, { length: 16834 * 2 })
+      .map(Math.random)
+      .map((number) => (number * (1 << 8)))
+  );
+
+  // Writing buffer to a file then try to read it
+  await writeFile(fn, buffer);
+  const readBuffer = await readFile(fn);
+  assert.strictEqual(readBuffer.equals(buffer), true);
+}
+
+async function validateReadFileProc() {
+  // Test to make sure reading a file under the /proc directory works. Adapted
+  // from test-fs-read-file-sync-hostname.js.
+  // Refs:
+  // - https://groups.google.com/forum/#!topic/nodejs-dev/rxZ_RoH1Gn0
+  // - https://github.com/nodejs/node/issues/21331
+
+  // Test is Linux-specific.
+  if (!common.isLinux)
+    return;
+
+  const hostname = await readFile('/proc/sys/kernel/hostname');
+  assert.ok(hostname.length > 0);
+}
+
+validateReadFile()
+  .then(() => validateReadFileProc())
   .then(common.mustCall());