From 16f5a97ed154889cf568305642d9e6ed1c00ef92 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Tue, 8 Jun 2021 19:44:33 +0200 Subject: [PATCH] fixup! tools: update doctool dependencies, migrate to ESM --- test/doctool/test-make-doc.mjs | 16 ++++++------ tools/doc/addon-verify.mjs | 45 ++++++++++++++-------------------- tools/doc/allhtml.mjs | 9 +++---- tools/doc/alljson.mjs | 9 +++---- tools/doc/generate.mjs | 4 +-- tools/doc/html.mjs | 7 +++--- tools/doc/stability.mjs | 14 +++++------ tools/doc/versions.mjs | 8 +++--- 8 files changed, 47 insertions(+), 65 deletions(-) diff --git a/test/doctool/test-make-doc.mjs b/test/doctool/test-make-doc.mjs index a3b6c0c24b1590..06ec6e028bf4e8 100644 --- a/test/doctool/test-make-doc.mjs +++ b/test/doctool/test-make-doc.mjs @@ -3,7 +3,6 @@ import * as common from '../common/index.mjs'; import assert from 'assert'; import fs from 'fs'; import path from 'path'; -import { fileURLToPath } from 'url'; if (common.isWindows) { common.skip('`make doc` does not run on Windows'); @@ -12,10 +11,10 @@ if (common.isWindows) { // This tests that `make doc` generates the documentation properly. // Note that for this test to pass, `make doc` must be run first. 
-const apiPath = fileURLToPath(new URL('../../out/doc/api', import.meta.url)); -const mdPath = fileURLToPath(new URL('../../doc/api', import.meta.url)); -const allMD = fs.readdirSync(mdPath); -const allDocs = fs.readdirSync(apiPath); +const apiURL = new URL('../../out/doc/api/', import.meta.url); +const mdURL = new URL('../../doc/api/', import.meta.url); +const allMD = fs.readdirSync(mdURL); +const allDocs = fs.readdirSync(apiURL); assert.ok(allDocs.includes('index.html')); const actualDocs = allDocs.filter( @@ -34,7 +33,7 @@ for (const name of actualDocs) { ); } -const toc = fs.readFileSync(path.resolve(apiPath, 'index.html'), 'utf8'); +const toc = fs.readFileSync(new URL('./index.html', apiURL), 'utf8'); const re = /href="([^/]+\.html)"/; const globalRe = new RegExp(re, 'g'); const links = toc.match(globalRe); @@ -57,8 +56,9 @@ for (const actualDoc of actualDocs) { assert.ok( expectedDocs.includes(actualDoc), `${actualDoc} does not match TOC`); - assert.ok( - fs.statSync(path.join(apiPath, actualDoc)).size !== 0, + assert.notStrictEqual( + fs.statSync(new URL(`./${actualDoc}`, apiURL)).size, + 0, `${actualDoc} is empty` ); } diff --git a/tools/doc/addon-verify.mjs b/tools/doc/addon-verify.mjs index f626364d7ecfc0..ee4d597693b8b3 100644 --- a/tools/doc/addon-verify.mjs +++ b/tools/doc/addon-verify.mjs @@ -4,18 +4,16 @@ // Modify the require paths in the js code to pull from the build tree. // Triggered from the build-addons target in the Makefile and vcbuild.bat. 
-import { mkdir, writeFile } from 'fs'; -import { resolve } from 'path'; -import { fileURLToPath } from 'url'; +import { mkdir, writeFile } from 'fs/promises'; import gfm from 'remark-gfm'; import remarkParse from 'remark-parse'; import { toVFile } from 'to-vfile'; import unified from 'unified'; -const rootDir = fileURLToPath(new URL('../..', import.meta.url)); -const doc = resolve(rootDir, 'doc', 'api', 'addons.md'); -const verifyDir = resolve(rootDir, 'test', 'addons'); +const rootDir = new URL('../../', import.meta.url); +const doc = new URL('./doc/api/addons.md', rootDir); +const verifyDir = new URL('./test/addons/', rootDir); const file = toVFile.readSync(doc, 'utf8'); const tree = unified().use(remarkParse).use(gfm).parse(file); @@ -38,9 +36,10 @@ tree.children.forEach((node) => { } }); -Object.keys(addons).forEach((header) => { - verifyFiles(addons[header].files, header); -}); +await Promise.all( + Object.keys(addons).flatMap( + (header) => verifyFiles(addons[header].files, header) + )); function verifyFiles(files, blockName) { const fileNames = Object.keys(files); @@ -48,13 +47,13 @@ function verifyFiles(files, blockName) { // Must have a .cc and a .js to be a valid test. 
if (!fileNames.some((name) => name.endsWith('.cc')) || !fileNames.some((name) => name.endsWith('.js'))) { - return; + return []; } blockName = blockName.toLowerCase().replace(/\s/g, '_').replace(/\W/g, ''); - const dir = resolve( + const dir = new URL( + `./${String(++id).padStart(2, '0')}_${blockName}/`, verifyDir, - `${String(++id).padStart(2, '0')}_${blockName}` ); files = fileNames.map((name) => { @@ -68,14 +67,14 @@ ${files[name].replace( `; } return { - path: resolve(dir, name), - name: name, - content: files[name] + content: files[name], + name, + url: new URL(`./${name}`, dir), }; }); files.push({ - path: resolve(dir, 'binding.gyp'), + url: new URL('./binding.gyp', dir), content: JSON.stringify({ targets: [ { @@ -87,16 +86,8 @@ ${files[name].replace( }) }); - mkdir(dir, () => { - // Ignore errors. - - files.forEach(({ path, content }) => { - writeFile(path, content, (err) => { - if (err) - throw err; + const dirCreation = mkdir(dir); - console.log(`Wrote ${path}`); - }); - }); - }); + return files.map(({ url, content }) => + dirCreation.then(() => writeFile(url, content))); } diff --git a/tools/doc/allhtml.mjs b/tools/doc/allhtml.mjs index 1aadb541ade4a3..54a51dd6316010 100644 --- a/tools/doc/allhtml.mjs +++ b/tools/doc/allhtml.mjs @@ -2,16 +2,15 @@ // of the generated html files. import fs from 'fs'; -import { fileURLToPath } from 'url'; -const source = fileURLToPath(new URL('../../out/doc/api', import.meta.url)); +const source = new URL('../../out/doc/api/', import.meta.url); // Get a list of generated API documents. const htmlFiles = fs.readdirSync(source, 'utf8') .filter((name) => name.includes('.html') && name !== 'all.html'); // Read the table of contents. -const toc = fs.readFileSync(source + '/index.html', 'utf8'); +const toc = fs.readFileSync(new URL('./index.html', source), 'utf8'); // Extract (and concatenate) the toc and apicontent from each document. 
let contents = ''; @@ -27,7 +26,7 @@ const seen = { for (const link of toc.match(/<a.*?>/g)) { const href = /href="(.*?)"/.exec(link)[1]; if (!htmlFiles.includes(href) || seen[href]) continue; - const data = fs.readFileSync(source + '/' + href, 'utf8'); + const data = fs.readFileSync(new URL(`./${href}`, source), 'utf8'); // Split the doc. const match = /(<\/ul>\s*)?<\/\w+>\s*<\w+ id="apicontent">/.exec(data); @@ -73,7 +72,7 @@ all = all.slice(0, apiStart.index + apiStart[0].length) + all.slice(apiEnd); // Write results. -fs.writeFileSync(source + '/all.html', all, 'utf8'); +fs.writeFileSync(new URL('./all.html', source), all, 'utf8'); // Validate all hrefs have a target. const ids = new Set(); diff --git a/tools/doc/alljson.mjs b/tools/doc/alljson.mjs index a5bb0f57321a00..18afc29e7e9366 100644 --- a/tools/doc/alljson.mjs +++ b/tools/doc/alljson.mjs @@ -2,16 +2,15 @@ // from the generated json files. import fs from 'fs'; -import { fileURLToPath } from 'url'; -const source = fileURLToPath(new URL('../../out/doc/api', import.meta.url)); +const source = new URL('../../out/doc/api/', import.meta.url); // Get a list of generated API documents. const jsonFiles = fs.readdirSync(source, 'utf8') .filter((name) => name.includes('.json') && name !== 'all.json'); // Read the table of contents. -const toc = fs.readFileSync(source + '/index.html', 'utf8'); +const toc = fs.readFileSync(new URL('./index.html', source), 'utf8'); // Initialize results. Only these four data values will be collected. const results = { @@ -36,7 +35,7 @@ for (const link of toc.match(/<a.*?>/g)) { const json = href.replace('.html', '.json'); if (!jsonFiles.includes(json) || seen[json]) continue; const data = JSON.parse( - fs.readFileSync(source + '/' + json, 'utf8') + fs.readFileSync(new URL(`./${json}`, source), 'utf8') .replace(//g)) { } // Write results.
-fs.writeFileSync(source + '/all.json', +fs.writeFileSync(new URL('./all.json', source), `${JSON.stringify(results, null, 2)}\n`, 'utf8'); diff --git a/tools/doc/generate.mjs b/tools/doc/generate.mjs index b05f741c4c3382..3bf60878ab5e27 100644 --- a/tools/doc/generate.mjs +++ b/tools/doc/generate.mjs @@ -28,14 +28,12 @@ import gfm from 'remark-gfm'; import markdown from 'remark-parse'; import remark2rehype from 'remark-rehype'; import unified from 'unified'; -import { fileURLToPath } from 'url'; import * as html from './html.mjs'; import * as json from './json.mjs'; import { replaceLinks } from './markdown.mjs'; -const linksMapperFile = fileURLToPath( - new URL('links-mapper.json', import.meta.url)); +const linksMapperFile = new URL('links-mapper.json', import.meta.url); const linksMapper = JSON.parse(readFileSync(linksMapperFile, 'utf8')); // Parse the args. diff --git a/tools/doc/html.mjs b/tools/doc/html.mjs index 9bc60ee0e8771f..d1662a08c44924 100644 --- a/tools/doc/html.mjs +++ b/tools/doc/html.mjs @@ -21,7 +21,6 @@ import fs from 'fs'; import path from 'path'; -import { fileURLToPath } from 'url'; import highlightJs from 'highlight.js'; import raw from 'rehype-raw'; @@ -37,7 +36,7 @@ import * as typeParser from './type-parser.mjs'; const { highlight, getLanguage } = highlightJs; -const docPath = fileURLToPath(new URL('../../doc', import.meta.url)); +const docPath = new URL('../../doc/', import.meta.url); // Add class attributes to index navigation links. 
function navClasses() { @@ -49,7 +48,7 @@ function navClasses() { }; } -const gtocPath = path.join(docPath, 'api', 'index.md'); +const gtocPath = new URL('./api/index.md', docPath); const gtocMD = fs.readFileSync(gtocPath, 'utf8') .replace(/\(([^#?]+?)\.md\)/ig, (_, filename) => `(${filename}.html)`) .replace(/^<!--.*?-->/gms, ''); @@ -62,7 +61,7 @@ const gtocHTML = unified() .use(htmlStringify) .processSync(gtocMD).toString(); -const templatePath = path.join(docPath, 'template.html'); +const templatePath = new URL('./template.html', docPath); const template = fs.readFileSync(templatePath, 'utf8'); function processContent(content) { diff --git a/tools/doc/stability.mjs b/tools/doc/stability.mjs index 008e5cf56d7a48..6b5f182a76e7e0 100644 --- a/tools/doc/stability.mjs +++ b/tools/doc/stability.mjs @@ -1,8 +1,6 @@ // Build stability table to documentation.html/json/md by generated all.json import fs from 'fs'; -import path from 'path'; -import { fileURLToPath } from 'url'; import raw from 'rehype-raw'; import htmlStringify from 'rehype-stringify'; @@ -12,17 +10,17 @@ import remark2rehype from 'remark-rehype'; import unified from 'unified'; import { visit } from 'unist-util-visit'; -const source = fileURLToPath(new URL('../../out/doc/api', import.meta.url)); -const data = JSON.parse(fs.readFileSync(path.join(source, 'all.json'), 'utf8')); +const source = new URL('../../out/doc/api/', import.meta.url); +const data = JSON.parse(fs.readFileSync(new URL('./all.json', source), 'utf8')); const markBegin = '<!-- STABILITY_OVERVIEW_SLOT_BEGIN -->'; const markEnd = '<!-- STABILITY_OVERVIEW_SLOT_END -->'; const mark = `${markBegin}(.*)${markEnd}`; const output = { - json: path.join(source, 'stability.json'), - docHTML: path.join(source, 'documentation.html'), - docJSON: path.join(source, 'documentation.json'), - docMarkdown: path.join(source, 'documentation.md'), + json: new URL('./stability.json', source), + docHTML: new URL('./documentation.html', source), + docJSON: new URL('./documentation.json', source), + docMarkdown: new URL('./documentation.md',
source), }; function collectStability(data) { diff --git a/tools/doc/versions.mjs b/tools/doc/versions.mjs index 0324f6d106cbd3..ec623a2dbd206e 100644 --- a/tools/doc/versions.mjs +++ b/tools/doc/versions.mjs @@ -1,13 +1,11 @@ import { readFileSync, writeFileSync } from 'fs'; import https from 'https'; -import path from 'path'; -import { fileURLToPath } from 'url'; -const srcRoot = fileURLToPath(new URL('../..', import.meta.url)); +const srcRoot = new URL('../../', import.meta.url); const isRelease = () => { const re = /#define NODE_VERSION_IS_RELEASE 0/; - const file = path.join(srcRoot, 'src', 'node_version.h'); + const file = new URL('./src/node_version.h', srcRoot); return !re.test(readFileSync(file, { encoding: 'utf8' })); }; @@ -38,7 +36,7 @@ async function versions() { const url = 'https://raw.githubusercontent.com/nodejs/node/HEAD/CHANGELOG.md'; let changelog; - const file = path.join(srcRoot, 'CHANGELOG.md'); + const file = new URL('./CHANGELOG.md', srcRoot); if (kNoInternet) { changelog = readFileSync(file, { encoding: 'utf8' }); } else {