Showing 17 changed files with 2,642 additions and 2,431 deletions.
@@ -1,55 +1,53 @@
-// This file is only needed to update test 262 used to test this polyfill
-// by using the command: `$ grunt update-test262`
-module.exports = function (grunt) {
-
-    grunt.initConfig({
-        pkg: grunt.file.readJSON('package.json'),
-
-        clean: {
-            test262: ['tmp/test262**', 'data/test262**', 'tests/test262/']
-        },
-
-        curl: {
-            test262: {
-                src : 'https://github.com/tc39/test262/archive/master.zip',
-                dest: 'tmp/test262.zip'
-            }
-        },
-
-        unzip: {
-            test262: {
-                src : 'tmp/test262.zip',
-                dest: 'tmp/'
-            }
-        },
-
-        copy: {
-            test262: {
-                expand: true,
-                cwd : 'tmp/test262-master/',
-                dest : 'tests/test262',
-                src : [
-                    'LICENSE',
-                    'test/intl402/**/*.js',
-                    'harness/*.js'
-                ]
-            }
-        }
-
-    });
-
-    grunt.loadTasks('./tasks');
-    grunt.loadNpmTasks('grunt-contrib-clean');
-    grunt.loadNpmTasks('grunt-contrib-copy');
-    grunt.loadNpmTasks('grunt-curl');
-    grunt.loadNpmTasks('grunt-zip');
-
-    grunt.registerTask('update-test262', [
-        'clean:test262',
-        'curl:test262',
-        'unzip:test262',
-        'copy:test262',
-        'update-tests'
-    ]);
-
-};
+module.exports = function(grunt) {
+    grunt.initConfig({
+        pkg: grunt.file.readJSON("package.json"),
+
+        clean: {
+            test262: [ "tmp/test262**", "data/test262**", "tests/test262/" ]
+        },
+
+        curl: {
+            test262: {
+                src: "https://github.com/tc39/test262/archive/master.zip",
+                dest: "tmp/test262.zip"
+            }
+        },
+
+        unzip: {
+            test262: {
+                src: "tmp/test262.zip",
+                dest: "tmp/"
+            }
+        },
+
+        copy: {
+            test262: {
+                expand: true,
+                cwd: "tmp/test262-master/",
+                dest: "tests/test262",
+                src: [
+                    "LICENSE",
+                    "test/intl402/**/*.js",
+                    "harness/*.js"
+                ]
+            }
+        }
+
+    })
+
+    grunt.loadTasks("./tasks")
+    grunt.loadNpmTasks("grunt-contrib-clean")
+    grunt.loadNpmTasks("grunt-contrib-copy")
+    grunt.loadNpmTasks("grunt-curl")
+    grunt.loadNpmTasks("grunt-zip")
+
+    grunt.registerTask("update-test262", [
+        "clean:test262",
+        "curl:test262",
+        "unzip:test262",
+        "copy:test262",
+        "update-tests"
+    ])
+}
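
The Gruntfile above keeps the refresh workflow as a single alias task: the array passed to grunt.registerTask("update-test262", [...]) runs the clean, curl, unzip and copy targets plus the locally loaded update-tests task in order, so (as the removed header comment notes) the vendored test262 suite is refreshed with `$ grunt update-test262`. Below is a minimal, hypothetical sketch of that alias pattern, not part of this commit, using only core Grunt APIs and made-up task names.

// Hypothetical Gruntfile sketch (not from this commit): an alias task registered with an
// array runs its subtasks sequentially, the same mechanism "update-test262" relies on.
module.exports = function (grunt) {
    grunt.registerTask("download", function () {
        grunt.log.writeln("pretend tmp/test262.zip was fetched")
    })

    grunt.registerTask("extract", function () {
        grunt.log.writeln("pretend the archive was unzipped into tmp/")
    })

    // `grunt refresh` now executes "download" and then "extract" in that order.
    grunt.registerTask("refresh", [ "download", "extract" ])
}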
@@ -1,178 +1,178 @@
 /* global Promise */
-import * as fs from 'fs';
-import * as p from 'path';
-import {sync as mkdirpSync} from 'mkdirp';
+import * as fs from "fs"
+import * as p from "path"
+import { sync as mkdirpSync } from "mkdirp"

 function writeFile(filename, contents) {
-    return new Promise((resolve, reject) => {
-        fs.writeFile(filename, contents, (err) => {
-            if (err) {
-                reject(err);
-            } else {
-                resolve(p.resolve(filename));
-            }
-        });
-    });
+    return new Promise((resolve, reject) => {
+        fs.writeFile(filename, contents, (err) => {
+            if (err) {
+                reject(err)
+            } else {
+                resolve(p.resolve(filename))
+            }
+        })
+    })
 }

 function mergeData(...sources) {
-    return sources.reduce((data, source) => {
-        Object.keys(source || {}).forEach((locale) => {
-            data[locale] = Object.assign(data[locale] || {}, source[locale]);
-        });
+    return sources.reduce((data, source) => {
+        Object.keys(source || {}).forEach((locale) => {
+            data[locale] = Object.assign(data[locale] || {}, source[locale])
+        })

-        return data;
-    }, {});
+        return data
+    }, {})
 }

-function reviver (k, v) {
-    let idx;
+function reviver(k, v) {
+    let idx

-    if (k === 'locale')
-        return v;
+    if (k === "locale")
+        return v

-    else if (typeof v === 'string') {
-        idx = prims.indexOf(v);
-        valCount++;
+    else if (typeof v === "string") {
+        idx = prims.indexOf(v)
+        valCount++

-        if (idx === -1)
-            idx += prims.push(v);
+        if (idx === -1)
+            idx += prims.push(v)

-        return '###prims['+ idx +']###';
-    }
+        return `###prims[${ idx }]###`
+    }

-    else if (typeof v === 'object' && v !== null) {
-        const str = JSON.stringify(v);
-        objCount++;
+    else if (typeof v === "object" && v !== null) {
+        const str = JSON.stringify(v)
+        objCount++

-        if (objStrs.hasOwnProperty(str))
-            return objStrs[str];
+        if (objStrs.hasOwnProperty(str))
+            return objStrs[str]

         // We need to make sure this object is not added to the same
         // array as an object it references (and we need to check
         // this recursively)
-        let depth;
-        let objDepths = [0];
+        let depth
+        let objDepths = [ 0 ]

-        for (let key in v) {
-            if (typeof v[key] === 'string' && (depth = v[key].match(/^###objs\[(\d+)/)))
-                objDepths.push(+depth[1] + 1);
-        }
+        for (let key in v) {
+            if (typeof v[key] === "string" && (depth = v[key].match(/^###objs\[(\d+)/)))
+                objDepths.push(+depth[1] + 1)
+        }

-        depth = Math.max.apply(Math, objDepths);
+        depth = Math.max(...objDepths)

-        if (!Array.isArray(objs[depth]))
-            objs[depth] = [];
+        if (!Array.isArray(objs[depth]))
+            objs[depth] = []

-        idx = objs[depth].push(v) - 1;
-        objStrs[str] = '###objs['+ depth +']['+ idx +']###';
+        idx = objs[depth].push(v) - 1
+        objStrs[str] = `###objs[${ depth }][${ idx }]###`

-        return objStrs[str];
-    }
+        return objStrs[str]
+    }

-    return v;
+    return v
 }

 // -----------------------------------------------------------------------------

-mkdirpSync('locale-data/');
-mkdirpSync('locale-data/json/');
-mkdirpSync('locale-data/jsonp/');
+mkdirpSync("locale-data/")
+mkdirpSync("locale-data/json/")
+mkdirpSync("locale-data/jsonp/")

 // extracting data into CLDR

 // Regex for converting locale JSON to object grammar, obviously simple and
 // incomplete but should be good enough for the CLDR JSON
-const jsonpExp = /"(?!default)([\w$][\w\d$]+)":/g;
+const jsonpExp = /"(?!default)([\w$][\w\d$]+)":/g

-import reduceLocaleData from './utils/reduce';
+import reduceLocaleData from "./utils/reduce"

-import extractCalendars from './utils/extract-calendars';
-import extractNumbersFields from './utils/extract-numbers';
-import {getAllLocales} from './utils/locales';
+import extractCalendars from "./utils/extract-calendars"
+import extractNumbersFields from "./utils/extract-numbers"
+import { getAllLocales } from "./utils/locales"

 // Default to all CLDR locales.
-const locales = getAllLocales();
+const locales = getAllLocales()

 // Each type of data has the structure: `{"<locale>": {"<key>": <value>}}`,
 // which is well suited for merging into a single object per locale. This
 // performs that deep merge and returns the aggregated result.
 let locData = mergeData(
-    extractCalendars(locales),
-    extractNumbersFields(locales)
-);
+    extractCalendars(locales),
+    extractNumbersFields(locales)
+)

-let locStringData = {};
+let locStringData = {}

 Object.keys(locData).forEach((locale) => {
     // Ignore en-US-POSIX and root
-    if (locale.toLowerCase() === 'en-us-posix') {
-        return;
-    }
+    if (locale.toLowerCase() === "en-us-posix") {
+        return
+    }

-    const obj = reduceLocaleData(locale, locData[locale]);
-    locStringData[locale] = JSON.stringify(obj, null, 4);
-    const jsonpContent = `IntlPolyfill.__addLocaleData(${JSON.stringify(obj).replace(jsonpExp, '$1:')});`;
-    writeFile('locale-data/json/' + locale + '.json', locStringData[locale]);
-    writeFile('locale-data/jsonp/' + locale + '.js', jsonpContent);
-});
+    const obj = reduceLocaleData(locale, locData[locale])
+    locStringData[locale] = JSON.stringify(obj, null, 4)
+    const jsonpContent = `IntlPolyfill.__addLocaleData(${JSON.stringify(obj).replace(jsonpExp, "$1:")});`
+    writeFile(`locale-data/json/${ locale }.json`, locStringData[locale])
+    writeFile(`locale-data/jsonp/${ locale }.js`, jsonpContent)
+})

-console.log('Total number of locales is ' + Object.keys(locData).length);
+console.log(`Total number of locales is ${ Object.keys(locData).length}`)

 // compiling `locale-date/complete.js`

 function replacer($0, type, loc) {
-    return (type === 'prims' ? 'a' : 'b') + loc;
+    return (type === "prims" ? "a" : "b") + loc
 }

 let
-    objStrs = {},
-    objs = [],
-    prims = [],
+    objStrs = {},
+    objs = [],
+    prims = [],

-    valCount = 0,
-    objCount = 0,
+    valCount = 0,
+    objCount = 0,

-    fileData = '',
+    fileData = "",

-    locReducedData = {},
-    locNames = Object.keys(locStringData);
+    locReducedData = {},
+    locNames = Object.keys(locStringData)

 const
-    defaultLocale = 'en',
-    defaultLocaleIndex = locNames.indexOf(defaultLocale);
+    defaultLocale = "en",
+    defaultLocaleIndex = locNames.indexOf(defaultLocale)

 if (defaultLocaleIndex !== -1) {
     // Move the default locale to the beginning
-    locNames.splice(defaultLocaleIndex, 1);
-    locNames.unshift(defaultLocale);
+    locNames.splice(defaultLocaleIndex, 1)
+    locNames.unshift(defaultLocale)
 }

 locNames.forEach((k) => {
-    const c = locStringData[k];
-    locReducedData[k] = JSON.parse(c, reviver);
-});
+    const c = locStringData[k]
+    locReducedData[k] = JSON.parse(c, reviver)
+})

-fileData += '(function(addLocaleData){\n';
-fileData += `var a=${JSON.stringify(prims)},b=[];`;
+fileData += "(function(addLocaleData){\n"
+fileData += `var a=${JSON.stringify(prims)},b=[];`
 objs.forEach((val, idx) => {
-    const ref = JSON.stringify(val).replace(/"###(objs|prims)(\[[^#]+)###"/g, replacer);
+    const ref = JSON.stringify(val).replace(/"###(objs|prims)(\[[^#]+)###"/g, replacer)

-    fileData += `b[${idx}]=${ref};`;
-});
+    fileData += `b[${idx}]=${ref};`
+})

 locNames.forEach((k) => {
-    fileData += `addLocaleData(${locReducedData[k].replace(/###(objs|prims)(\[[^#]+)###/, replacer)});
-`;
-});
+    fileData += `addLocaleData(${locReducedData[k].replace(/###(objs|prims)(\[[^#]+)###/, replacer)});
+`
+})

-fileData += `})(IntlPolyfill.__addLocaleData);`;
+fileData += `})(IntlPolyfill.__addLocaleData);`

 // writting the complete optimized bundle
-writeFile('locale-data/complete.js', fileData);
+writeFile("locale-data/complete.js", fileData)

-console.log('Total number of reused strings is ' + prims.length + ' (reduced from ' + valCount + ')');
-console.log('Total number of reused objects is ' + Object.keys(objStrs).length + ' (reduced from ' + objCount + ')');
+console.log(`Total number of reused strings is ${ prims.length } (reduced from ${ valCount })`)
+console.log(`Total number of reused objects is ${ Object.keys(objStrs).length } (reduced from ${ objCount })`)

-process.on('unhandledRejection', (reason) => {throw reason;});
-console.log('Writing locale data files...');
+process.on("unhandledRejection", (reason) => { throw reason })
+console.log("Writing locale data files...")