feat: enable i18n
Integrate with Crowdin to download the latest translations and generate
the site for the supported locales.


Fix #64
molant committed Jul 21, 2021
1 parent 5e4353e commit d2c1c84
Showing 7 changed files with 348 additions and 29 deletions.
4 changes: 3 additions & 1 deletion .gitignore
@@ -5,4 +5,6 @@ node_modules
.vscode/settings.json
build/
content/
docs/
docs/
i18n/
!i18n/en-US/
17 changes: 17 additions & 0 deletions docusaurus.config.js
@@ -12,6 +12,19 @@ module.exports = {
favicon: 'assets/img/favicon.ico',
organizationName: 'electron',
projectName: 'electron',
i18n: {
defaultLocale: 'en',
locales: [
'de-de',
'en',
'es-es',
'fr-fr',
'ja-jp',
'pt-br',
'ru-ru',
'zh-cn',
],
},
themeConfig: {
announcementBar: {
id: 'to_old_docs',
@@ -46,6 +59,10 @@ module.exports = {
activeBaseRegex: '^\b$', // never active
},
{ to: 'blog', label: 'Blog', position: 'left' },
{
type: 'localeDropdown',
position: 'right',
},
{
href: 'https://github.com/electron/electron',
label: 'GitHub',
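For context, a minimal sketch of where Docusaurus will look for translated docs for each locale configured above. It mirrors the layout produced by scripts/tasks/download-translations.js further down; the sample locale comes from the config list, while the sample file name is only illustrative.

const path = require('path');

// Translated docs for a locale live under
// i18n/<locale>/docusaurus-plugin-content-docs/current/latest/
const translatedDocPath = (locale, doc) =>
  path.join('i18n', locale, 'docusaurus-plugin-content-docs', 'current', 'latest', doc);

console.log(translatedDocPath('fr-fr', 'tutorial/quick-start.md'));
// i18n/fr-fr/docusaurus-plugin-content-docs/current/latest/tutorial/quick-start.md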
9 changes: 6 additions & 3 deletions package.json
@@ -22,16 +22,17 @@
"prepare": "husky install"
},
"dependencies": {
"@crowdin/crowdin-api-client": "^1.11.2",
"@docusaurus/core": "^2.0.0-beta.3",
"@docusaurus/plugin-google-analytics": "^2.0.0-beta.3",
"@docusaurus/preset-classic": "^2.0.0-beta.3",
"@docusaurus/remark-plugin-npm2yarn": "^2.0.0-beta.3",
"@mdx-js/react": "^1.6.21",
"D": "^1.0.0",
"clsx": "^1.1.1",
"dotenv-safe": "^8.2.0",
"execa": "^5.0.0",
"react": "^17.0.1",
"react-dom": "^17.0.1"
"react-dom": "^17.0.1",
"unzipper": "^0.10.11"
},
"browserslist": {
"production": [
@@ -51,6 +52,8 @@
"@types/jest": "^26.0.23",
"@types/unist": "^2.0.3",
"del": "^6.0.0",
"dotenv": "^10.0.0",
"execa": "^5.0.0",
"fs-extra": "^9.1.0",
"globby": "^11.0.3",
"got": "^11.8.2",
49 changes: 40 additions & 9 deletions scripts/pre-build.js
@@ -5,8 +5,13 @@
* right places, and transform it to make it ready to
* be used by docusaurus.
*/
if (process.env.NODE_ENV !== 'production') {
require('dotenv').config();
}

const path = require('path');
const { existsSync } = require('fs');
const fs = require('fs-extra');

const del = require('del');
const latestVersion = require('latest-version');
@@ -17,8 +22,10 @@ const { createSidebar } = require('./tasks/create-sidebar');
const { fixContent } = require('./tasks/md-fixers');
const { copyNewContent } = require('./tasks/copy-new-content');
const { sha } = require('../package.json');
const { downloadTranslations } = require('./tasks/download-translations');

const DOCS_FOLDER = path.join('docs', 'latest');
const I18N_FOLDER = 'i18n';
// const BLOG_FOLDER = 'blog';

/**
@@ -64,15 +71,6 @@ const start = async (source) => {
return process.exit(-1);
}

// TODO: Uncomment once we have the blog enabled
// console.log(`Downloading posts`);
// await download({
// target: 'master',
// repository: 'electronjs.org',
// destination: BLOG_FOLDER,
// downloadMatch: 'data/blog',
// });

console.log('Copying new content');
await copyNewContent(DOCS_FOLDER);

@@ -84,6 +82,39 @@

console.log('Updating sidebar.js');
await createSidebar('docs', path.join(process.cwd(), 'sidebars.js'));

console.log('Downloading translations');
const locales = await downloadTranslations(I18N_FOLDER);

for (const locale of locales) {
const localeDocs = path.join(
I18N_FOLDER,
locale,
'docusaurus-plugin-content-docs',
'current'
);
const staticResources = ['fiddles', 'images'];

console.log(`Copying static assets to ${locale}`);
for (const staticResource of staticResources) {
await fs.copy(
path.join(DOCS_FOLDER, staticResource),
path.join(localeDocs, 'latest', staticResource)
);
}

console.log(`Fixing markdown (${locale})`);
await fixContent(localeDocs, 'latest');

console.log(`Adding automatic frontmatter (${locale})`);
await addFrontmatter(path.join(localeDocs, 'latest'));
}
};

process.on('unhandledRejection', (error) => {
console.error(error);

process.exit(1);
});

start(process.argv[2]);
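For local runs, a minimal sanity-check sketch of the environment this script now expects. The variable names come from scripts/tasks/download-translations.js below; everything else is illustrative.

// Assumes a .env file at the repo root, picked up by require('dotenv').config()
require('dotenv').config();

for (const name of ['CROWDIN_TOKEN', 'CROWDIN_PROJECT_ID']) {
  if (!process.env[name]) {
    console.warn(`${name} is not set; the Crowdin download step will fail`);
  }
}

// The pre-build itself runs as: node scripts/pre-build.js [source]
// where the optional source argument is read via process.argv[2].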
170 changes: 170 additions & 0 deletions scripts/tasks/download-translations.js
@@ -0,0 +1,170 @@
//@ts-check
const stream = require('stream');
const { join } = require('path');
const { promisify } = require('util');
const pipeline = promisify(stream.pipeline);
const got = require('got').default;
const unzipper = require('unzipper');
const { tmpdir } = require('os');
const fs = require('fs-extra');

// Assumes running from the root of the repo
const OUTPUT_PATH = join(process.cwd(), 'temp-i18n');
const { CROWDIN_TOKEN, CROWDIN_PROJECT_ID } = process.env;
const PROJECT_ID = parseInt(CROWDIN_PROJECT_ID);
const crowdin = require('@crowdin/crowdin-api-client').default;

const { translationsApi } = new crowdin({
token: CROWDIN_TOKEN,
});

/**
* Downloads the Crowdin file and unzips the contents
* @param {string} url
* @param {string} destination
*/
const downloadFiles = async (url, destination) => {
const tmpPath = join(
tmpdir(),
'electronjs.org',
`${Math.ceil(Math.random() * 1000)}`
);

await pipeline(got.stream(url), unzipper.Extract({ path: tmpPath }));

const contentPath = join(tmpPath, `[electron.i18n] master`, `content`);

const locales = await fs.readdir(contentPath);

for (const locale of locales) {
const localeDestination = join(destination, locale.toLowerCase());
const docsDestination = join(
localeDestination,
'docusaurus-plugin-content-docs',
'current',
'latest'
);
await fs.mkdirp(docsDestination);

await fs.copy(join(contentPath, locale, 'docs'), docsDestination);

await fs.copy(
join(contentPath, locale, 'website', 'i18n'),
localeDestination
);
}

return locales.map((locale) => locale.toLowerCase());
};

/**
* Waits for the given number of seconds
* @param {number} seconds
* @returns
*/
const waitFor = (seconds) => {
return new Promise((resolve) => {
setTimeout(() => {
resolve();
}, seconds * 1000);
});
};

/**
* @param {number} buildId
* @returns
*/
const getBuild = async (buildId) => {
const builds = await translationsApi.listProjectBuilds(PROJECT_ID);
const build = builds.data.find((item) => item.data.id === buildId);

return build.data;
};

/**
* Kicks off a build for `PROJECT_ID` and returns the download link once it
* has finished.
* If a new build is not kicked off, we risk downloading outdated files.
* @param {number} projectId
*/
const buildAndDownloadLink = async (projectId) => {
const {
data: { id: buildId },
} = await translationsApi.buildProject(projectId);

let counter = 10;
let interval = 30;
let build;

for (let i = 0; i < counter; i++) {
build = await getBuild(buildId);
console.log(`Crowdin status: Build ${buildId} - ${build.status}`);

if (build.status === 'finished') {
break;
} else {
console.log(
`Crowdin status: Waiting ${interval} seconds (retry ${i}/${counter})`
);
await waitFor(interval);
}
}

if (build.status !== 'finished') {
throw new Error(`The project didn't build fast enough on Crowdin`);
}

const {
data: { url },
} = await translationsApi.downloadTranslations(PROJECT_ID, buildId);

return url;
};

/**
* Gets the download link for the latest available build
* @param {number} projectId
*/
const getLatestBuildLink = async (projectId) => {
const { data: builds } = await translationsApi.listProjectBuilds(projectId);

const {
data: { url },
} = await translationsApi.downloadTranslations(PROJECT_ID, builds[0].data.id);

return url;
};

/**
* Downloads the translations into the given target
* or the default one otherwise.
* @param {string} [target]
*/
const downloadTranslations = async (target) => {
let downloadLink = '';
if (process.env.NODE_ENV === 'production') {
console.log(
`Starting a Crowdin build to download translations. This might take a few minutes.`
);
downloadLink = await buildAndDownloadLink(PROJECT_ID);
} else {
console.log(`Downloading the latest available Crowdin build.`);
downloadLink = await getLatestBuildLink(PROJECT_ID);
}

const destination = target || OUTPUT_PATH;
return await downloadFiles(downloadLink, destination);
};

// When a file is run directly from Node.js, `require.main` is set to its module.
// That means that it is possible to determine whether a file has been run directly
// by testing `require.main === module`.
// https://nodejs.org/docs/latest/api/modules.html#modules_accessing_the_main_module
if (require.main === module) {
downloadTranslations();
}

module.exports = {
downloadTranslations,
};
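A minimal usage sketch for this new task, assuming CROWDIN_TOKEN and CROWDIN_PROJECT_ID are set in the environment (for example via the .env file read by scripts/pre-build.js). Passing 'i18n' mirrors how pre-build.js invokes it.

const { downloadTranslations } = require('./scripts/tasks/download-translations');

downloadTranslations('i18n')
  .then((locales) => {
    // locales is the lowercased list of locale folders extracted from the Crowdin zip
    console.log(`Downloaded translations for: ${locales.join(', ')}`);
  })
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });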
15 changes: 14 additions & 1 deletion scripts/tasks/md-fixers.js
@@ -72,6 +72,19 @@ const fiddleTransformer = (line) => {
}
};

/**
* Crowdin translations put markdown content right
* after HTML comments, which breaks the Docusaurus
* parser. We need to add a new EOL after `-->` is
* found.
*/
const newLineOnHTMLComment = (line) => {
if (line.includes('-->')) {
return line.replace('-->', '-->\n');
}
return line;
};

/**
* Applies any transformation that can be executed line by line on
* the document to make sure it is ready to be consumed by
@@ -83,7 +96,7 @@ const fiddleTransformer = (line) => {
const transform = (doc) => {
const lines = doc.split('\n');
const newDoc = [];
const transformers = [apiTransformer, fiddleTransformer];
const transformers = [apiTransformer, fiddleTransformer, newLineOnHTMLComment];

for (const line of lines) {
const newLine = transformers.reduce((newLine, transformer) => {
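To illustrate the new transformer, a self-contained sketch of its before/after behaviour; the helper is inlined here so the sketch runs on its own, and the sample line is made up.

// Same logic as the newLineOnHTMLComment helper above
const newLineOnHTMLComment = (line) =>
  line.includes('-->') ? line.replace('-->', '-->\n') : line;

console.log(newLineOnHTMLComment('<!-- translator note -->## Quick start'));
// Prints:
// <!-- translator note -->
// ## Quick start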