diff --git a/packages/node-builtins/README.md b/packages/node-builtins/README.md
new file mode 100644
index 000000000..16edf5d7a
--- /dev/null
+++ b/packages/node-builtins/README.md
@@ -0,0 +1,92 @@
+# @rollup/plugin-node-builtins
+
+```
+npm install --save-dev @rollup/plugin-node-builtins
+```
+
+Allows Node builtins to be `require`d/`import`ed in the browser. This provides the shims needed to support modules that were designed for Browserify; some modules also require [rollup-plugin-node-globals](https://github.com/calvinmetcalf/rollup-plugin-node-globals).
+
+The following modules include ES6-specific versions that allow named imports in addition to the default import, and should work fine if you only use this plugin.
+
+- process\*
+- events
+- stream\*
+- util\*
+- path
+- buffer\*
+- querystring
+- url\*
+- string_decoder\*
+- punycode
+- http\*†
+- https\*†
+- os\*
+- assert\*
+- constants
+- timers\*
+- console\*‡
+- vm\*§
+- zlib\*
+- tty
+- domain
+- dns∆
+- dgram∆
+- child_process∆
+- cluster∆
+- module∆
+- net∆
+- readline∆
+- repl∆
+- tls∆
+- fs˚
+- crypto˚
+
+\* requires the [node-globals plugin](https://github.com/calvinmetcalf/rollup-plugin-node-globals)
+
+† the http and https modules are actually the same and don't differentiate based on protocol
+
+‡ default export only, because it's console; seriously, just use the global
+
+§ vm does not handle all corner cases, and handles even fewer in a web worker
+
+∆ not shimmed, just returns a mock
+
+˚ optional; pass an option to enable the browserified shim
+
+Crypto is not shimmed; we just provide the CommonJS one from browserify, and it will likely not work. If you really want it, pass `{crypto: true}` as an option (see the example config after this README).
+
+Not all included modules roll up equally. Streams (and by extension anything that requires them, like http) are a mess of circular references that are pretty much impossible to tree-shake out. Similarly, url methods are actually shortcuts to a url object, so they don't tree-shake very well. punycode, path, querystring, events, util, and process tree-shake very well, especially if you use named imports.
+
+Config for using this with something simple like events or querystring:
+
+```js
+import builtins from '@rollup/plugin-node-builtins';
+rollup({
+  entry: 'main.js',
+  plugins: [builtins()]
+});
+```
+
+Now, if main.js contains this, it should just work:
+
+```js
+import EventEmitter from 'events';
+import { inherits } from 'util';
+
+// etc
+```
+
+Config for something more complicated, like http:
+
+```js
+import builtins from '@rollup/plugin-node-builtins';
+import globals from 'rollup-plugin-node-globals';
+rollup({
+  entry: 'main.js',
+  plugins: [globals(), builtins()]
+});
+```
+
+# License
+
+MIT, except the ES6 ports of browserify modules, which are under whatever license the original library used.
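The README above mentions the optional `crypto` and `fs` shims but never shows them in a config. Here is a minimal sketch, assuming `crypto: true` is the flag documented above and that the browserify-fs shim is enabled the same way via a hypothetical `fs: true` option; the actual option name for fs is not stated in this package.

```js
// Minimal sketch: enabling the optional crypto/fs shims.
// `crypto: true` is documented in the README above; `fs: true` is an assumed
// flag name for the browserify-fs shim and may differ in the actual plugin.
import builtins from '@rollup/plugin-node-builtins';
import globals from 'rollup-plugin-node-globals';

rollup({
  entry: 'main.js',
  // globals() is included as in the http example above; modules marked with *
  // in the list rely on it for process/Buffer globals.
  plugins: [globals(), builtins({ crypto: true, fs: true })]
});
```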
diff --git a/packages/node-builtins/build-constants.js b/packages/node-builtins/build-constants.js new file mode 100644 index 000000000..bc2964816 --- /dev/null +++ b/packages/node-builtins/build-constants.js @@ -0,0 +1,18 @@ +const fs = require('fs'); +const constants = require('constants'); +const path = require('path'); + +const out = fs.createWriteStream(path.join(__dirname, 'dist', 'constants.js')); + +Object.keys(constants).forEach((key) => { + const value = constants[key]; + out.write(`export var ${key} = ${JSON.stringify(value)};\n`); +}); +out.write('export default {\n '); +Object.keys(constants).forEach((key, i) => { + if (i) { + out.write(',\n '); + } + out.write(`${key}: ${key}`); +}); +out.end('\n};\n'); diff --git a/packages/node-builtins/package.json b/packages/node-builtins/package.json new file mode 100644 index 000000000..ba0b2e432 --- /dev/null +++ b/packages/node-builtins/package.json @@ -0,0 +1,38 @@ +{ + "name": "@rollup/plugin-node-builtins", + "version": "2.1.1", + "description": "use node builtins in browser with rollup", + "license": "ISC", + "repository": { + "type": "git", + "url": "git@github.com:calvinmetcalf/rollup-plugin-node-builtins.git" + }, + "author": "", + "main": "dist/rollup-plugin-node-builtins.cjs.js", + "scripts": { + "build": "rollup -c -f cjs -o dist/rollup-plugin-node-builtins.cjs.js && rollup -c -f es -o dist/rollup-plugin-node-builtins.es6.js && node build-constants.js", + "prebuild": "rm -rf dist && mkdir dist", + "prepublish": "npm test", + "pretest": "npm run build", + "test": "mocha" + }, + "keywords": [ + "rollup-plugin" + ], + "dependencies": { + "browserify-fs": "^1.0.0", + "buffer-es6": "^4.9.2", + "crypto-browserify": "^3.11.0", + "process-es6": "^0.11.2" + }, + "devDependencies": { + "babel-preset-es2015-rollup": "^3.0.0", + "debug": "^2.2.0", + "mocha": "^3.0.2", + "rollup": "^0.37.0", + "rollup-plugin-babel": "^2.4.0", + "rollup-plugin-node-globals": "^1.0.7", + "serve": "^2.0.0" + }, + "jsnext:main": "dist/rollup-plugin-node-builtins.es6.js" +} diff --git a/packages/node-builtins/rollup.config.js b/packages/node-builtins/rollup.config.js new file mode 100644 index 000000000..cab19848c --- /dev/null +++ b/packages/node-builtins/rollup.config.js @@ -0,0 +1,9 @@ +import babel from 'rollup-plugin-babel'; + +const external = Object.keys(require('./package.json').dependencies).concat('path'); + +export default { + entry: 'src/index.js', + plugins: [babel()], + external +}; diff --git a/packages/node-builtins/src/es6/assert.js b/packages/node-builtins/src/es6/assert.js new file mode 100644 index 000000000..4b7a2032d --- /dev/null +++ b/packages/node-builtins/src/es6/assert.js @@ -0,0 +1,498 @@ +// based on node assert, original notice: + +// http://wiki.commonjs.org/wiki/Unit_Testing/1.0 +// +// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8! 
+// +// Originally from narwhal.js (http://narwhaljs.org) +// Copyright (c) 2009 Thomas Robinson <280north.com> +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +import { isBuffer } from 'buffer'; +import { + isPrimitive, + inherits, + isError, + isFunction, + isRegExp, + isDate, + inspect as utilInspect +} from 'util'; + +function compare(a, b) { + if (a === b) { + return 0; + } + + let x = a.length; + let y = b.length; + + for (let i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i]; + y = b[i]; + break; + } + } + + if (x < y) { + return -1; + } + if (y < x) { + return 1; + } + return 0; +} +const hasOwn = Object.prototype.hasOwnProperty; + +const objectKeys = + Object.keys || + function(obj) { + const keys = []; + for (const key in obj) { + if (hasOwn.call(obj, key)) keys.push(key); + } + return keys; + }; + +const pSlice = Array.prototype.slice; +let _functionsHaveNames; +function functionsHaveNames() { + if (typeof _functionsHaveNames !== 'undefined') { + return _functionsHaveNames; + } + return (_functionsHaveNames = (function() { + return function foo() {}.name === 'foo'; + })()); +} +function pToString(obj) { + return Object.prototype.toString.call(obj); +} +function isView(arrbuf) { + if (isBuffer(arrbuf)) { + return false; + } + if (typeof global.ArrayBuffer !== 'function') { + return false; + } + if (typeof ArrayBuffer.isView === 'function') { + return ArrayBuffer.isView(arrbuf); + } + if (!arrbuf) { + return false; + } + if (arrbuf instanceof DataView) { + return true; + } + if (arrbuf.buffer && arrbuf.buffer instanceof ArrayBuffer) { + return true; + } + return false; +} +// 1. The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +function assert(value, message) { + if (!value) fail(value, true, message, '==', ok); +} +export default assert; + +// 2. The AssertionError is defined in assert. 
+// new assert.AssertionError({ message: message, +// actual: actual, +// expected: expected }) + +const regex = /\s*function\s+([^\(\s]*)\s*/; +// based on https://github.com/ljharb/function.prototype.name/blob/adeeeec8bfcc6068b187d7d9fb3d5bb1d3a30899/implementation.js +function getName(func) { + if (!isFunction(func)) { + return; + } + if (functionsHaveNames()) { + return func.name; + } + const str = func.toString(); + const match = str.match(regex); + return match && match[1]; +} +assert.AssertionError = AssertionError; +export function AssertionError(options) { + this.name = 'AssertionError'; + this.actual = options.actual; + this.expected = options.expected; + this.operator = options.operator; + if (options.message) { + this.message = options.message; + this.generatedMessage = false; + } else { + this.message = getMessage(this); + this.generatedMessage = true; + } + const stackStartFunction = options.stackStartFunction || fail; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, stackStartFunction); + } else { + // non v8 browsers so we can have a stacktrace + const err = new Error(); + if (err.stack) { + let out = err.stack; + + // try to strip useless frames + const fn_name = getName(stackStartFunction); + const idx = out.indexOf(`\n${fn_name}`); + if (idx >= 0) { + // once we have located the function frame + // we need to strip out everything before it (and its line) + const next_line = out.indexOf('\n', idx + 1); + out = out.substring(next_line + 1); + } + + this.stack = out; + } + } +} + +// assert.AssertionError instanceof Error +inherits(AssertionError, Error); + +function truncate(s, n) { + if (typeof s === 'string') { + return s.length < n ? s : s.slice(0, n); + } + return s; +} +function inspect(something) { + if (functionsHaveNames() || !isFunction(something)) { + return utilInspect(something); + } + const rawname = getName(something); + const name = rawname ? `: ${rawname}` : ''; + return `[Function${name}]`; +} +function getMessage(self) { + return `${truncate(inspect(self.actual), 128)} ${self.operator} ${truncate( + inspect(self.expected), + 128 + )}`; +} + +// At present only the three keys mentioned above are used and +// understood by the spec. Implementations or sub modules can pass +// other keys to the AssertionError's constructor - they will be +// ignored. + +// 3. All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +export function fail(actual, expected, message, operator, stackStartFunction) { + throw new AssertionError({ + message, + actual, + expected, + operator, + stackStartFunction + }); +} + +// EXTENSION! allows for well behaved errors defined elsewhere. +assert.fail = fail; + +// 4. Pure assertion tests whether a value is truthy, as determined +// by !!guard. +// assert.ok(guard, message_opt); +// This statement is equivalent to assert.equal(true, !!guard, +// message_opt);. To test strictly for the value true, use +// assert.strictEqual(true, guard, message_opt);. + +export function ok(value, message) { + if (!value) fail(value, true, message, '==', ok); +} +assert.ok = ok; +export { ok as assert }; + +// 5. The equality assertion tests shallow, coercive equality with +// ==. 
+// assert.equal(actual, expected, message_opt); +assert.equal = equal; +export function equal(actual, expected, message) { + if (actual != expected) fail(actual, expected, message, '==', equal); +} + +// 6. The non-equality assertion tests for whether two objects are not equal +// with != assert.notEqual(actual, expected, message_opt); +assert.notEqual = notEqual; +export function notEqual(actual, expected, message) { + if (actual == expected) { + fail(actual, expected, message, '!=', notEqual); + } +} + +// 7. The equivalence assertion tests a deep equality relation. +// assert.deepEqual(actual, expected, message_opt); +assert.deepEqual = deepEqual; +export function deepEqual(actual, expected, message) { + if (!_deepEqual(actual, expected, false)) { + fail(actual, expected, message, 'deepEqual', deepEqual); + } +} +assert.deepStrictEqual = deepStrictEqual; +export function deepStrictEqual(actual, expected, message) { + if (!_deepEqual(actual, expected, true)) { + fail(actual, expected, message, 'deepStrictEqual', deepStrictEqual); + } +} + +function _deepEqual(actual, expected, strict, memos) { + // 7.1. All identical values are equivalent, as determined by ===. + if (actual === expected) { + return true; + } else if (isBuffer(actual) && isBuffer(expected)) { + return compare(actual, expected) === 0; + + // 7.2. If the expected value is a Date object, the actual value is + // equivalent if it is also a Date object that refers to the same time. + } else if (isDate(actual) && isDate(expected)) { + return actual.getTime() === expected.getTime(); + + // 7.3 If the expected value is a RegExp object, the actual value is + // equivalent if it is also a RegExp object with the same source and + // properties (`global`, `multiline`, `lastIndex`, `ignoreCase`). + } else if (isRegExp(actual) && isRegExp(expected)) { + return ( + actual.source === expected.source && + actual.global === expected.global && + actual.multiline === expected.multiline && + actual.lastIndex === expected.lastIndex && + actual.ignoreCase === expected.ignoreCase + ); + + // 7.4. Other pairs that do not both pass typeof value == 'object', + // equivalence is determined by ==. + } else if ( + (actual === null || typeof actual !== 'object') && + (expected === null || typeof expected !== 'object') + ) { + return strict ? actual === expected : actual == expected; + + // If both values are instances of typed arrays, wrap their underlying + // ArrayBuffers in a Buffer each to increase performance + // This optimization requires the arrays to have the same type as checked by + // Object.prototype.toString (aka pToString). Never perform binary + // comparisons for Float*Arrays, though, since e.g. +0 === -0 but their + // bit patterns are not identical. + } else if ( + isView(actual) && + isView(expected) && + pToString(actual) === pToString(expected) && + !(actual instanceof Float32Array || actual instanceof Float64Array) + ) { + return compare(new Uint8Array(actual.buffer), new Uint8Array(expected.buffer)) === 0; + + // 7.5 For all other Object pairs, including Array objects, equivalence is + // determined by having the same number of owned properties (as verified + // with Object.prototype.hasOwnProperty.call), the same set of keys + // (although not necessarily the same order), equivalent values for every + // corresponding key, and an identical 'prototype' property. Note: this + // accounts for both named and indexed properties on Arrays. 
+ } else if (isBuffer(actual) !== isBuffer(expected)) { + return false; + } + memos = memos || { actual: [], expected: [] }; + + const actualIndex = memos.actual.indexOf(actual); + if (actualIndex !== -1) { + if (actualIndex === memos.expected.indexOf(expected)) { + return true; + } + } + + memos.actual.push(actual); + memos.expected.push(expected); + + return objEquiv(actual, expected, strict, memos); +} + +function isArguments(object) { + return Object.prototype.toString.call(object) == '[object Arguments]'; +} + +function objEquiv(a, b, strict, actualVisitedObjects) { + if (a === null || a === undefined || b === null || b === undefined) return false; + // if one is a primitive, the other must be same + if (isPrimitive(a) || isPrimitive(b)) return a === b; + if (strict && Object.getPrototypeOf(a) !== Object.getPrototypeOf(b)) return false; + const aIsArgs = isArguments(a); + const bIsArgs = isArguments(b); + if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs)) return false; + if (aIsArgs) { + a = pSlice.call(a); + b = pSlice.call(b); + return _deepEqual(a, b, strict); + } + const ka = objectKeys(a); + const kb = objectKeys(b); + let key; + let i; + // having the same number of owned properties (keys incorporates + // hasOwnProperty) + if (ka.length !== kb.length) return false; + // the same set of keys (although not necessarily the same order), + ka.sort(); + kb.sort(); + // ~~~cheap key test + for (i = ka.length - 1; i >= 0; i--) { + if (ka[i] !== kb[i]) return false; + } + // equivalent values for every corresponding key, and + // ~~~possibly expensive deep test + for (i = ka.length - 1; i >= 0; i--) { + key = ka[i]; + if (!_deepEqual(a[key], b[key], strict, actualVisitedObjects)) return false; + } + return true; +} + +// 8. The non-equivalence assertion tests for any deep inequality. +// assert.notDeepEqual(actual, expected, message_opt); +assert.notDeepEqual = notDeepEqual; +export function notDeepEqual(actual, expected, message) { + if (_deepEqual(actual, expected, false)) { + fail(actual, expected, message, 'notDeepEqual', notDeepEqual); + } +} + +assert.notDeepStrictEqual = notDeepStrictEqual; +export function notDeepStrictEqual(actual, expected, message) { + if (_deepEqual(actual, expected, true)) { + fail(actual, expected, message, 'notDeepStrictEqual', notDeepStrictEqual); + } +} + +// 9. The strict equality assertion tests strict equality, as determined by ===. +// assert.strictEqual(actual, expected, message_opt); +assert.strictEqual = strictEqual; +export function strictEqual(actual, expected, message) { + if (actual !== expected) { + fail(actual, expected, message, '===', strictEqual); + } +} + +// 10. The strict non-equality assertion tests for strict inequality, as +// determined by !==. assert.notStrictEqual(actual, expected, message_opt); +assert.notStrictEqual = notStrictEqual; +export function notStrictEqual(actual, expected, message) { + if (actual === expected) { + fail(actual, expected, message, '!==', notStrictEqual); + } +} + +function expectedException(actual, expected) { + if (!actual || !expected) { + return false; + } + + if (Object.prototype.toString.call(expected) == '[object RegExp]') { + return expected.test(actual); + } + + try { + if (actual instanceof expected) { + return true; + } + } catch (e) { + // Ignore. The instanceof check doesn't work for arrow functions. 
+ } + + if (Error.isPrototypeOf(expected)) { + return false; + } + + return expected.call({}, actual) === true; +} + +function _tryBlock(block) { + let error; + try { + block(); + } catch (e) { + error = e; + } + return error; +} + +function _throws(shouldThrow, block, expected, message) { + let actual; + + if (typeof block !== 'function') { + throw new TypeError('"block" argument must be a function'); + } + + if (typeof expected === 'string') { + message = expected; + expected = null; + } + + actual = _tryBlock(block); + + message = + (expected && expected.name ? ` (${expected.name}).` : '.') + (message ? ` ${message}` : '.'); + + if (shouldThrow && !actual) { + fail(actual, expected, `Missing expected exception${message}`); + } + + const userProvidedMessage = typeof message === 'string'; + const isUnwantedException = !shouldThrow && isError(actual); + const isUnexpectedException = !shouldThrow && actual && !expected; + + if ( + (isUnwantedException && userProvidedMessage && expectedException(actual, expected)) || + isUnexpectedException + ) { + fail(actual, expected, `Got unwanted exception${message}`); + } + + if ( + (shouldThrow && actual && expected && !expectedException(actual, expected)) || + (!shouldThrow && actual) + ) { + throw actual; + } +} + +// 11. Expected to throw an error: +// assert.throws(block, Error_opt, message_opt); +assert.throws = throws; +export function throws(block, /* optional*/ error, /* optional*/ message) { + _throws(true, block, error, message); +} + +// EXTENSION! This is annoying to write outside this module. +assert.doesNotThrow = doesNotThrow; +export function doesNotThrow(block, /* optional*/ error, /* optional*/ message) { + _throws(false, block, error, message); +} + +assert.ifError = ifError; +export function ifError(err) { + if (err) throw err; +} diff --git a/packages/node-builtins/src/es6/console.js b/packages/node-builtins/src/es6/console.js new file mode 100644 index 000000000..83790d6e9 --- /dev/null +++ b/packages/node-builtins/src/es6/console.js @@ -0,0 +1,15 @@ +function noop() {} + +export default global.console + ? global.console + : { + log: noop, + info: noop, + warn: noop, + error: noop, + dir: noop, + assert: noop, + time: noop, + timeEnd: noop, + trace: noop + }; diff --git a/packages/node-builtins/src/es6/domain.js b/packages/node-builtins/src/es6/domain.js new file mode 100644 index 000000000..7793b5376 --- /dev/null +++ b/packages/node-builtins/src/es6/domain.js @@ -0,0 +1,102 @@ +/* + + +

License

+ +Unless stated otherwise all works are: + + + +and licensed under: + + + +

MIT License

+ +
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ + +*/ +/* +modified by Calvin Metcalf to adhere to how the node one works a little better +*/ +import { EventEmitter } from 'events'; + +import inherits from './inherits'; + +inherits(Domain, EventEmitter); +function createEmitError(d) { + return emitError; + function emitError(e) { + d.emit('error', e); + } +} + +export function Domain() { + EventEmitter.call(this); + this.__emitError = createEmitError(this); +} +Domain.prototype.add = function(emitter) { + emitter.on('error', this.__emitError); +}; +Domain.prototype.remove = function(emitter) { + emitter.removeListener('error', this.__emitError); +}; +Domain.prototype.bind = function(fn) { + const emitError = this.__emitError; + return function() { + const args = Array.prototype.slice.call(arguments); + try { + fn.apply(null, args); + } catch (err) { + emitError(err); + } + }; +}; +Domain.prototype.intercept = function(fn) { + const emitError = this.__emitError; + return function(err) { + if (err) { + emitError(err); + } else { + const args = Array.prototype.slice.call(arguments, 1); + try { + fn.apply(null, args); + } catch (err) { + emitError(err); + } + } + }; +}; +Domain.prototype.run = function(fn) { + const emitError = this.__emitError; + try { + fn(); + } catch (err) { + emitError(err); + } + return this; +}; +Domain.prototype.dispose = function() { + this.removeAllListeners(); + return this; +}; +Domain.prototype.enter = Domain.prototype.exit = function() { + return this; +}; +export function createDomain() { + return new Domain(); +} +export var create = createDomain; + +export default { + Domain, + createDomain, + create +}; diff --git a/packages/node-builtins/src/es6/empty.js b/packages/node-builtins/src/es6/empty.js new file mode 100644 index 000000000..ff8b4c563 --- /dev/null +++ b/packages/node-builtins/src/es6/empty.js @@ -0,0 +1 @@ +export default {}; diff --git a/packages/node-builtins/src/es6/events.js b/packages/node-builtins/src/es6/events.js new file mode 100644 index 000000000..c203c0458 --- /dev/null +++ b/packages/node-builtins/src/es6/events.js @@ -0,0 +1,440 @@ +let domain; + +// This constructor is used to store event handlers. Instantiating this is +// faster than explicitly calling `Object.create(null)` to get a "clean" empty +// object (tested with v8 v4.9). +function EventHandlers() {} +EventHandlers.prototype = Object.create(null); + +function EventEmitter() { + EventEmitter.init.call(this); +} +export default EventEmitter; +export { EventEmitter }; + +// nodejs oddity +// require('events') === require('events').EventEmitter +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.usingDomains = false; + +EventEmitter.prototype.domain = undefined; +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +EventEmitter.defaultMaxListeners = 10; + +EventEmitter.init = function() { + this.domain = null; + if (EventEmitter.usingDomains) { + // if there is an active domain, then attach to it. + if (domain.active && !(this instanceof domain.Domain)) { + this.domain = domain.active; + } + } + + if (!this._events || this._events === Object.getPrototypeOf(this)._events) { + this._events = new EventHandlers(); + this._eventsCount = 0; + } + + this._maxListeners = this._maxListeners || undefined; +}; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. 
Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== 'number' || n < 0 || isNaN(n)) + throw new TypeError('"n" argument must be a positive number'); + this._maxListeners = n; + return this; +}; + +function $getMaxListeners(that) { + if (that._maxListeners === undefined) return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return $getMaxListeners(this); +}; + +// These standalone emit* functions are used to optimize calling of event +// handlers for fast cases because emit() itself often has a variable number of +// arguments and can be deoptimized because of that. These functions always have +// the same number of arguments and thus do not get deoptimized, so the code +// inside them can execute faster. +function emitNone(handler, isFn, self) { + if (isFn) handler.call(self); + else { + const len = handler.length; + const listeners = arrayClone(handler, len); + for (let i = 0; i < len; ++i) listeners[i].call(self); + } +} +function emitOne(handler, isFn, self, arg1) { + if (isFn) handler.call(self, arg1); + else { + const len = handler.length; + const listeners = arrayClone(handler, len); + for (let i = 0; i < len; ++i) listeners[i].call(self, arg1); + } +} +function emitTwo(handler, isFn, self, arg1, arg2) { + if (isFn) handler.call(self, arg1, arg2); + else { + const len = handler.length; + const listeners = arrayClone(handler, len); + for (let i = 0; i < len; ++i) listeners[i].call(self, arg1, arg2); + } +} +function emitThree(handler, isFn, self, arg1, arg2, arg3) { + if (isFn) handler.call(self, arg1, arg2, arg3); + else { + const len = handler.length; + const listeners = arrayClone(handler, len); + for (let i = 0; i < len; ++i) listeners[i].call(self, arg1, arg2, arg3); + } +} + +function emitMany(handler, isFn, self, args) { + if (isFn) handler.apply(self, args); + else { + const len = handler.length; + const listeners = arrayClone(handler, len); + for (let i = 0; i < len; ++i) listeners[i].apply(self, args); + } +} + +EventEmitter.prototype.emit = function emit(type) { + let er; + let handler; + let len; + let args; + let i; + let events; + let domain; + const needDomainExit = false; + let doError = type === 'error'; + + events = this._events; + if (events) doError = doError && events.error == null; + else if (!doError) return false; + + domain = this.domain; + + // If there is no 'error' event listener then throw. + if (doError) { + er = arguments[1]; + if (domain) { + if (!er) er = new Error('Uncaught, unspecified "error" event'); + er.domainEmitter = this; + er.domain = domain; + er.domainThrown = false; + domain.emit('error', er); + } else if (er instanceof Error) { + throw er; // Unhandled 'error' event + } else { + // At least give some kind of context to the user + const err = new Error(`Uncaught, unspecified "error" event. 
(${er})`); + err.context = er; + throw err; + } + return false; + } + + handler = events[type]; + + if (!handler) return false; + + const isFn = typeof handler === 'function'; + len = arguments.length; + switch (len) { + // fast cases + case 1: + emitNone(handler, isFn, this); + break; + case 2: + emitOne(handler, isFn, this, arguments[1]); + break; + case 3: + emitTwo(handler, isFn, this, arguments[1], arguments[2]); + break; + case 4: + emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]); + break; + // slower + default: + args = new Array(len - 1); + for (i = 1; i < len; i++) args[i - 1] = arguments[i]; + emitMany(handler, isFn, this, args); + } + + if (needDomainExit) domain.exit(); + + return true; +}; + +function _addListener(target, type, listener, prepend) { + let m; + let events; + let existing; + + if (typeof listener !== 'function') throw new TypeError('"listener" argument must be a function'); + + events = target._events; + if (!events) { + events = target._events = new EventHandlers(); + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener) { + target.emit('newListener', type, listener.listener ? listener.listener : listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (!existing) { + // Optimize the case of one listener. Don't need the extra array object. + existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === 'function') { + // Adding the second element, need to change to array. + existing = events[type] = prepend ? [listener, existing] : [existing, listener]; + } else { + // If we've already got an array, just append. + if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + } + + // Check for listener leak + if (!existing.warned) { + m = $getMaxListeners(target); + if (m && m > 0 && existing.length > m) { + existing.warned = true; + const w = new Error( + `Possible EventEmitter memory leak detected. ${existing.length} ${type} listeners added. ` + + `Use emitter.setMaxListeners() to increase limit` + ); + w.name = 'MaxListenersExceededWarning'; + w.emitter = target; + w.type = type; + w.count = existing.length; + emitWarning(w); + } + } + } + + return target; +} +function emitWarning(e) { + typeof console.warn === 'function' ? 
console.warn(e) : console.log(e); +} +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.prependListener = function prependListener(type, listener) { + return _addListener(this, type, listener, true); +}; + +function _onceWrap(target, type, listener) { + let fired = false; + function g() { + target.removeListener(type, g); + if (!fired) { + fired = true; + listener.apply(target, arguments); + } + } + g.listener = listener; + return g; +} + +EventEmitter.prototype.once = function once(type, listener) { + if (typeof listener !== 'function') throw new TypeError('"listener" argument must be a function'); + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +EventEmitter.prototype.prependOnceListener = function prependOnceListener(type, listener) { + if (typeof listener !== 'function') throw new TypeError('"listener" argument must be a function'); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; +}; + +// emits a 'removeListener' event iff the listener was removed +EventEmitter.prototype.removeListener = function removeListener(type, listener) { + let list; + let events; + let position; + let i; + let originalListener; + + if (typeof listener !== 'function') throw new TypeError('"listener" argument must be a function'); + + events = this._events; + if (!events) return this; + + list = events[type]; + if (!list) return this; + + if (list === listener || (list.listener && list.listener === listener)) { + if (--this._eventsCount === 0) this._events = new EventHandlers(); + else { + delete events[type]; + if (events.removeListener) this.emit('removeListener', type, list.listener || listener); + } + } else if (typeof list !== 'function') { + position = -1; + + for (i = list.length; i-- > 0; ) { + if (list[i] === listener || (list[i].listener && list[i].listener === listener)) { + originalListener = list[i].listener; + position = i; + break; + } + } + + if (position < 0) return this; + + if (list.length === 1) { + list[0] = undefined; + if (--this._eventsCount === 0) { + this._events = new EventHandlers(); + return this; + } + delete events[type]; + } else { + spliceOne(list, position); + } + + if (events.removeListener) this.emit('removeListener', type, originalListener || listener); + } + + return this; +}; + +EventEmitter.prototype.removeAllListeners = function removeAllListeners(type) { + let listeners; + let events; + + events = this._events; + if (!events) return this; + + // not listening for removeListener, no need to emit + if (!events.removeListener) { + if (arguments.length === 0) { + this._events = new EventHandlers(); + this._eventsCount = 0; + } else if (events[type]) { + if (--this._eventsCount === 0) this._events = new EventHandlers(); + else delete events[type]; + } + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + const keys = Object.keys(events); + for (var i = 0, key; i < keys.length; ++i) { + key = keys[i]; + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = new EventHandlers(); + this._eventsCount = 0; + return this; + } + + listeners = events[type]; + + if (typeof listeners === 'function') { + this.removeListener(type, listeners); + } else if (listeners) { + // LIFO order + do { + this.removeListener(type, 
listeners[listeners.length - 1]); + } while (listeners[0]); + } + + return this; +}; + +EventEmitter.prototype.listeners = function listeners(type) { + let evlistener; + let ret; + const events = this._events; + + if (!events) ret = []; + else { + evlistener = events[type]; + if (!evlistener) ret = []; + else if (typeof evlistener === 'function') ret = [evlistener.listener || evlistener]; + else ret = unwrapListeners(evlistener); + } + + return ret; +}; + +EventEmitter.listenerCount = function(emitter, type) { + if (typeof emitter.listenerCount === 'function') { + return emitter.listenerCount(type); + } + return listenerCount.call(emitter, type); +}; + +EventEmitter.prototype.listenerCount = listenerCount; +function listenerCount(type) { + const events = this._events; + + if (events) { + const evlistener = events[type]; + + if (typeof evlistener === 'function') { + return 1; + } else if (evlistener) { + return evlistener.length; + } + } + + return 0; +} + +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : []; +}; + +// About 1.5x faster than the two-arg version of Array#splice(). +function spliceOne(list, index) { + for (let i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1) list[i] = list[k]; + list.pop(); +} + +function arrayClone(arr, i) { + const copy = new Array(i); + while (i--) copy[i] = arr[i]; + return copy; +} + +function unwrapListeners(arr) { + const ret = new Array(arr.length); + for (let i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; +} diff --git a/packages/node-builtins/src/es6/http-lib/capability.js b/packages/node-builtins/src/es6/http-lib/capability.js new file mode 100644 index 000000000..6314a28c3 --- /dev/null +++ b/packages/node-builtins/src/es6/http-lib/capability.js @@ -0,0 +1,51 @@ +export var hasFetch = isFunction(global.fetch) && isFunction(global.ReadableStream); + +let _blobConstructor; +export function blobConstructor() { + if (typeof _blobConstructor !== 'undefined') { + return _blobConstructor; + } + try { + new global.Blob([new ArrayBuffer(1)]); + _blobConstructor = true; + } catch (e) { + _blobConstructor = false; + } + return _blobConstructor; +} +let xhr; + +function checkTypeSupport(type) { + if (!xhr) { + xhr = new global.XMLHttpRequest(); + // If location.host is empty, e.g. if this page/worker was loaded + // from a Blob, then use example.com to avoid an error + xhr.open('GET', global.location.host ? '/' : 'https://example.com'); + } + try { + xhr.responseType = type; + return xhr.responseType === type; + } catch (e) { + return false; + } +} + +// For some strange reason, Safari 7.0 reports typeof global.ArrayBuffer === 'object'. +// Safari 7.1 appears to have fixed this bug. +const haveArrayBuffer = typeof global.ArrayBuffer !== 'undefined'; +const haveSlice = haveArrayBuffer && isFunction(global.ArrayBuffer.prototype.slice); + +export var arraybuffer = haveArrayBuffer && checkTypeSupport('arraybuffer'); +// These next two tests unavoidably show warnings in Chrome. Since fetch will always +// be used if it's available, just return false for these to avoid the warnings. 
+export var msstream = !hasFetch && haveSlice && checkTypeSupport('ms-stream'); +export var mozchunkedarraybuffer = + !hasFetch && haveArrayBuffer && checkTypeSupport('moz-chunked-arraybuffer'); +export var overrideMimeType = isFunction(xhr.overrideMimeType); +export var vbArray = isFunction(global.VBArray); + +function isFunction(value) { + return typeof value === 'function'; +} + +xhr = null; // Help gc diff --git a/packages/node-builtins/src/es6/http-lib/request.js b/packages/node-builtins/src/es6/http-lib/request.js new file mode 100644 index 000000000..cfc35312d --- /dev/null +++ b/packages/node-builtins/src/es6/http-lib/request.js @@ -0,0 +1,276 @@ +import { inherits } from 'util'; + +import { Writable } from 'stream'; + +import * as capability from './capability'; +import { IncomingMessage, readyStates as rStates } from './response'; + +import toArrayBuffer from './to-arraybuffer'; + +function decideMode(preferBinary, useFetch) { + if (capability.hasFetch && useFetch) { + return 'fetch'; + } else if (capability.mozchunkedarraybuffer) { + return 'moz-chunked-arraybuffer'; + } else if (capability.msstream) { + return 'ms-stream'; + } else if (capability.arraybuffer && preferBinary) { + return 'arraybuffer'; + } else if (capability.vbArray && preferBinary) { + return 'text:vbarray'; + } + return 'text'; +} +export default ClientRequest; + +function ClientRequest(opts) { + const self = this; + Writable.call(self); + + self._opts = opts; + self._body = []; + self._headers = {}; + if (opts.auth) + self.setHeader('Authorization', `Basic ${new Buffer(opts.auth).toString('base64')}`); + Object.keys(opts.headers).forEach((name) => { + self.setHeader(name, opts.headers[name]); + }); + + let preferBinary; + let useFetch = true; + if (opts.mode === 'disable-fetch') { + // If the use of XHR should be preferred and includes preserving the 'content-type' header + useFetch = false; + preferBinary = true; + } else if (opts.mode === 'prefer-streaming') { + // If streaming is a high priority but binary compatibility and + // the accuracy of the 'content-type' header aren't + preferBinary = false; + } else if (opts.mode === 'allow-wrong-content-type') { + // If streaming is more important than preserving the 'content-type' header + preferBinary = !capability.overrideMimeType; + } else if (!opts.mode || opts.mode === 'default' || opts.mode === 'prefer-fast') { + // Use binary if text streaming may corrupt data or the content-type header, or for speed + preferBinary = true; + } else { + throw new Error('Invalid value for opts.mode'); + } + self._mode = decideMode(preferBinary, useFetch); + + self.on('finish', () => { + self._onFinish(); + }); +} + +inherits(ClientRequest, Writable); +// Taken from http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader%28%29-method +const unsafeHeaders = [ + 'accept-charset', + 'accept-encoding', + 'access-control-request-headers', + 'access-control-request-method', + 'connection', + 'content-length', + 'cookie', + 'cookie2', + 'date', + 'dnt', + 'expect', + 'host', + 'keep-alive', + 'origin', + 'referer', + 'te', + 'trailer', + 'transfer-encoding', + 'upgrade', + 'user-agent', + 'via' +]; +ClientRequest.prototype.setHeader = function(name, value) { + const self = this; + const lowerName = name.toLowerCase(); + // This check is not necessary, but it prevents warnings from browsers about setting unsafe + // headers. To be honest I'm not entirely sure hiding these warnings is a good thing, but + // http-browserify did it, so I will too. 
+ if (unsafeHeaders.indexOf(lowerName) !== -1) return; + + self._headers[lowerName] = { + name, + value + }; +}; + +ClientRequest.prototype.getHeader = function(name) { + const self = this; + return self._headers[name.toLowerCase()].value; +}; + +ClientRequest.prototype.removeHeader = function(name) { + const self = this; + delete self._headers[name.toLowerCase()]; +}; + +ClientRequest.prototype._onFinish = function() { + const self = this; + + if (self._destroyed) return; + const opts = self._opts; + + const headersObj = self._headers; + let body; + if (opts.method === 'POST' || opts.method === 'PUT' || opts.method === 'PATCH') { + if (capability.blobConstructor()) { + body = new global.Blob( + self._body.map((buffer) => toArrayBuffer(buffer)), + { + type: (headersObj['content-type'] || {}).value || '' + } + ); + } else { + // get utf8 string + body = Buffer.concat(self._body).toString(); + } + } + + if (self._mode === 'fetch') { + const headers = Object.keys(headersObj).map((name) => [ + headersObj[name].name, + headersObj[name].value + ]); + + global + .fetch(self._opts.url, { + method: self._opts.method, + headers, + body, + mode: 'cors', + credentials: opts.withCredentials ? 'include' : 'same-origin' + }) + .then( + (response) => { + self._fetchResponse = response; + self._connect(); + }, + (reason) => { + self.emit('error', reason); + } + ); + } else { + const xhr = (self._xhr = new global.XMLHttpRequest()); + try { + xhr.open(self._opts.method, self._opts.url, true); + } catch (err) { + process.nextTick(() => { + self.emit('error', err); + }); + return; + } + + // Can't set responseType on really old browsers + if ('responseType' in xhr) xhr.responseType = self._mode.split(':')[0]; + + if ('withCredentials' in xhr) xhr.withCredentials = !!opts.withCredentials; + + if (self._mode === 'text' && 'overrideMimeType' in xhr) + xhr.overrideMimeType('text/plain; charset=x-user-defined'); + + Object.keys(headersObj).forEach((name) => { + xhr.setRequestHeader(headersObj[name].name, headersObj[name].value); + }); + + self._response = null; + xhr.onreadystatechange = function() { + switch (xhr.readyState) { + case rStates.LOADING: + case rStates.DONE: + self._onXHRProgress(); + break; + } + }; + // Necessary for streaming in Firefox, since xhr.response is ONLY defined + // in onprogress, not in onreadystatechange with xhr.readyState = 3 + if (self._mode === 'moz-chunked-arraybuffer') { + xhr.onprogress = function() { + self._onXHRProgress(); + }; + } + + xhr.onerror = function() { + if (self._destroyed) return; + self.emit('error', new Error('XHR error')); + }; + + try { + xhr.send(body); + } catch (err) { + process.nextTick(() => { + self.emit('error', err); + }); + } + } +}; + +/** + * Checks if xhr.status is readable and non-zero, indicating no error. 
+ * Even though the spec says it should be available in readyState 3, + * accessing it throws an exception in IE8 + */ +function statusValid(xhr) { + try { + const { status } = xhr; + return status !== null && status !== 0; + } catch (e) { + return false; + } +} + +ClientRequest.prototype._onXHRProgress = function() { + const self = this; + + if (!statusValid(self._xhr) || self._destroyed) return; + + if (!self._response) self._connect(); + + self._response._onXHRProgress(); +}; + +ClientRequest.prototype._connect = function() { + const self = this; + + if (self._destroyed) return; + + self._response = new IncomingMessage(self._xhr, self._fetchResponse, self._mode); + self.emit('response', self._response); +}; + +ClientRequest.prototype._write = function(chunk, encoding, cb) { + const self = this; + + self._body.push(chunk); + cb(); +}; + +ClientRequest.prototype.abort = ClientRequest.prototype.destroy = function() { + const self = this; + self._destroyed = true; + if (self._response) self._response._destroyed = true; + if (self._xhr) self._xhr.abort(); + // Currently, there isn't a way to truly abort a fetch. + // If you like bikeshedding, see https://github.com/whatwg/fetch/issues/27 +}; + +ClientRequest.prototype.end = function(data, encoding, cb) { + const self = this; + if (typeof data === 'function') { + cb = data; + data = undefined; + } + + Writable.prototype.end.call(self, data, encoding, cb); +}; + +ClientRequest.prototype.flushHeaders = function() {}; +ClientRequest.prototype.setTimeout = function() {}; +ClientRequest.prototype.setNoDelay = function() {}; +ClientRequest.prototype.setSocketKeepAlive = function() {}; diff --git a/packages/node-builtins/src/es6/http-lib/response.js b/packages/node-builtins/src/es6/http-lib/response.js new file mode 100644 index 000000000..ef148f884 --- /dev/null +++ b/packages/node-builtins/src/es6/http-lib/response.js @@ -0,0 +1,181 @@ +import { inherits } from 'util'; +import { Readable } from 'stream'; + +import { overrideMimeType } from './capability'; + +const rStates = { + UNSENT: 0, + OPENED: 1, + HEADERS_RECEIVED: 2, + LOADING: 3, + DONE: 4 +}; +export { rStates as readyStates }; +export function IncomingMessage(xhr, response, mode) { + const self = this; + Readable.call(self); + + self._mode = mode; + self.headers = {}; + self.rawHeaders = []; + self.trailers = {}; + self.rawTrailers = []; + + // Fake the 'close' event, but only once 'end' fires + self.on('end', () => { + // The nextTick is necessary to prevent the 'request' module from causing an infinite loop + process.nextTick(() => { + self.emit('close'); + }); + }); + let read; + if (mode === 'fetch') { + self._fetchResponse = response; + + self.url = response.url; + self.statusCode = response.status; + self.statusMessage = response.statusText; + // backwards compatible version of for ( of ): + // for (var ,_i,_it = [Symbol.iterator](); = (_i = _it.next()).value,!_i.done;) + for ( + var header, _i, _it = response.headers[Symbol.iterator](); + (header = (_i = _it.next()).value), !_i.done; + + ) { + self.headers[header[0].toLowerCase()] = header[1]; + self.rawHeaders.push(header[0], header[1]); + } + + // TODO: this doesn't respect backpressure. 
Once WritableStream is available, this can be fixed + const reader = response.body.getReader(); + + read = function() { + reader.read().then((result) => { + if (self._destroyed) return; + if (result.done) { + self.push(null); + return; + } + self.push(new Buffer(result.value)); + read(); + }); + }; + read(); + } else { + self._xhr = xhr; + self._pos = 0; + + self.url = xhr.responseURL; + self.statusCode = xhr.status; + self.statusMessage = xhr.statusText; + const headers = xhr.getAllResponseHeaders().split(/\r?\n/); + headers.forEach((header) => { + const matches = header.match(/^([^:]+):\s*(.*)/); + if (matches) { + const key = matches[1].toLowerCase(); + if (key === 'set-cookie') { + if (self.headers[key] === undefined) { + self.headers[key] = []; + } + self.headers[key].push(matches[2]); + } else if (self.headers[key] !== undefined) { + self.headers[key] += `, ${matches[2]}`; + } else { + self.headers[key] = matches[2]; + } + self.rawHeaders.push(matches[1], matches[2]); + } + }); + + self._charset = 'x-user-defined'; + if (!overrideMimeType) { + const mimeType = self.rawHeaders['mime-type']; + if (mimeType) { + const charsetMatch = mimeType.match(/;\s*charset=([^;])(;|$)/); + if (charsetMatch) { + self._charset = charsetMatch[1].toLowerCase(); + } + } + if (!self._charset) self._charset = 'utf-8'; // best guess + } + } +} + +inherits(IncomingMessage, Readable); + +IncomingMessage.prototype._read = function() {}; + +IncomingMessage.prototype._onXHRProgress = function() { + const self = this; + + const xhr = self._xhr; + + let response = null; + switch (self._mode) { + case 'text:vbarray': // For IE9 + if (xhr.readyState !== rStates.DONE) break; + try { + // This fails in IE8 + response = new global.VBArray(xhr.responseBody).toArray(); + } catch (e) { + // pass + } + if (response !== null) { + self.push(new Buffer(response)); + break; + } + // Falls through in IE8 + case 'text': + try { + // This will fail when readyState = 3 in IE9. Switch mode and wait for readyState = 4 + response = xhr.responseText; + } catch (e) { + self._mode = 'text:vbarray'; + break; + } + if (response.length > self._pos) { + const newData = response.substr(self._pos); + if (self._charset === 'x-user-defined') { + const buffer = new Buffer(newData.length); + for (let i = 0; i < newData.length; i++) buffer[i] = newData.charCodeAt(i) & 0xff; + + self.push(buffer); + } else { + self.push(newData, self._charset); + } + self._pos = response.length; + } + break; + case 'arraybuffer': + if (xhr.readyState !== rStates.DONE || !xhr.response) break; + response = xhr.response; + self.push(new Buffer(new Uint8Array(response))); + break; + case 'moz-chunked-arraybuffer': // take whole + response = xhr.response; + if (xhr.readyState !== rStates.LOADING || !response) break; + self.push(new Buffer(new Uint8Array(response))); + break; + case 'ms-stream': + response = xhr.response; + if (xhr.readyState !== rStates.LOADING) break; + var reader = new global.MSStreamReader(); + reader.onprogress = function() { + if (reader.result.byteLength > self._pos) { + self.push(new Buffer(new Uint8Array(reader.result.slice(self._pos)))); + self._pos = reader.result.byteLength; + } + }; + reader.onload = function() { + self.push(null); + }; + // reader.onerror = ??? 
// TODO: this + reader.readAsArrayBuffer(response); + break; + } + + // The ms-stream case handles end separately in reader.onload() + if (self._xhr.readyState === rStates.DONE && self._mode !== 'ms-stream') { + self.push(null); + } +}; diff --git a/packages/node-builtins/src/es6/http-lib/to-arraybuffer.js b/packages/node-builtins/src/es6/http-lib/to-arraybuffer.js new file mode 100644 index 000000000..b8a59cd52 --- /dev/null +++ b/packages/node-builtins/src/es6/http-lib/to-arraybuffer.js @@ -0,0 +1,30 @@ +// from https://github.com/jhiesey/to-arraybuffer/blob/6502d9850e70ba7935a7df4ad86b358fc216f9f0/index.js + +// MIT License +// Copyright (c) 2016 John Hiesey +import { isBuffer } from 'buffer'; + +export default function(buf) { + // If the buffer is backed by a Uint8Array, a faster version will work + if (buf instanceof Uint8Array) { + // If the buffer isn't a subarray, return the underlying ArrayBuffer + if (buf.byteOffset === 0 && buf.byteLength === buf.buffer.byteLength) { + return buf.buffer; + } else if (typeof buf.buffer.slice === 'function') { + // Otherwise we need to get a proper copy + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + } + } + + if (isBuffer(buf)) { + // This is the slow version that will work with any Buffer + // implementation (even in old browsers) + const arrayCopy = new Uint8Array(buf.length); + const len = buf.length; + for (let i = 0; i < len; i++) { + arrayCopy[i] = buf[i]; + } + return arrayCopy.buffer; + } + throw new Error('Argument must be a Buffer'); +} diff --git a/packages/node-builtins/src/es6/http.js b/packages/node-builtins/src/es6/http.js new file mode 100644 index 000000000..8bbd3c020 --- /dev/null +++ b/packages/node-builtins/src/es6/http.js @@ -0,0 +1,164 @@ +/* +this and http-lib folder + +The MIT License + +Copyright (c) 2015 John Hiesey + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +*/ +import { parse } from 'url'; + +import ClientRequest from './http-lib/request'; + +export function request(opts, cb) { + if (typeof opts === 'string') opts = parse(opts); + + // Normally, the page is loaded from http or https, so not specifying a protocol + // will result in a (valid) protocol-relative url. However, this won't work if + // the protocol is something else, like 'file:' + const defaultProtocol = global.location.protocol.search(/^https?:$/) === -1 ? 
'http:' : ''; + + const protocol = opts.protocol || defaultProtocol; + let host = opts.hostname || opts.host; + const { port } = opts; + const path = opts.path || '/'; + + // Necessary for IPv6 addresses + if (host && host.indexOf(':') !== -1) host = `[${host}]`; + + // This may be a relative url. The browser should always be able to interpret it correctly. + opts.url = (host ? `${protocol}//${host}` : '') + (port ? `:${port}` : '') + path; + opts.method = (opts.method || 'GET').toUpperCase(); + opts.headers = opts.headers || {}; + + // Also valid opts.auth, opts.mode + + const req = new ClientRequest(opts); + if (cb) req.on('response', cb); + return req; +} + +export function get(opts, cb) { + const req = request(opts, cb); + req.end(); + return req; +} + +export function Agent() {} +Agent.defaultMaxSockets = 4; + +export var METHODS = [ + 'CHECKOUT', + 'CONNECT', + 'COPY', + 'DELETE', + 'GET', + 'HEAD', + 'LOCK', + 'M-SEARCH', + 'MERGE', + 'MKACTIVITY', + 'MKCOL', + 'MOVE', + 'NOTIFY', + 'OPTIONS', + 'PATCH', + 'POST', + 'PROPFIND', + 'PROPPATCH', + 'PURGE', + 'PUT', + 'REPORT', + 'SEARCH', + 'SUBSCRIBE', + 'TRACE', + 'UNLOCK', + 'UNSUBSCRIBE' +]; +export var STATUS_CODES = { + 100: 'Continue', + 101: 'Switching Protocols', + 102: 'Processing', // RFC 2518, obsoleted by RFC 4918 + 200: 'OK', + 201: 'Created', + 202: 'Accepted', + 203: 'Non-Authoritative Information', + 204: 'No Content', + 205: 'Reset Content', + 206: 'Partial Content', + 207: 'Multi-Status', // RFC 4918 + 300: 'Multiple Choices', + 301: 'Moved Permanently', + 302: 'Moved Temporarily', + 303: 'See Other', + 304: 'Not Modified', + 305: 'Use Proxy', + 307: 'Temporary Redirect', + 400: 'Bad Request', + 401: 'Unauthorized', + 402: 'Payment Required', + 403: 'Forbidden', + 404: 'Not Found', + 405: 'Method Not Allowed', + 406: 'Not Acceptable', + 407: 'Proxy Authentication Required', + 408: 'Request Time-out', + 409: 'Conflict', + 410: 'Gone', + 411: 'Length Required', + 412: 'Precondition Failed', + 413: 'Request Entity Too Large', + 414: 'Request-URI Too Large', + 415: 'Unsupported Media Type', + 416: 'Requested Range Not Satisfiable', + 417: 'Expectation Failed', + 418: "I'm a teapot", // RFC 2324 + 422: 'Unprocessable Entity', // RFC 4918 + 423: 'Locked', // RFC 4918 + 424: 'Failed Dependency', // RFC 4918 + 425: 'Unordered Collection', // RFC 4918 + 426: 'Upgrade Required', // RFC 2817 + 428: 'Precondition Required', // RFC 6585 + 429: 'Too Many Requests', // RFC 6585 + 431: 'Request Header Fields Too Large', // RFC 6585 + 500: 'Internal Server Error', + 501: 'Not Implemented', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + 504: 'Gateway Time-out', + 505: 'HTTP Version Not Supported', + 506: 'Variant Also Negotiates', // RFC 2295 + 507: 'Insufficient Storage', // RFC 4918 + 509: 'Bandwidth Limit Exceeded', + 510: 'Not Extended', // RFC 2774 + 511: 'Network Authentication Required' // RFC 6585 +}; + +export default { + request, + get, + Agent, + METHODS, + STATUS_CODES +}; diff --git a/packages/node-builtins/src/es6/inherits.js b/packages/node-builtins/src/es6/inherits.js new file mode 100644 index 000000000..4284ebe19 --- /dev/null +++ b/packages/node-builtins/src/es6/inherits.js @@ -0,0 +1,24 @@ +let inherits; +if (typeof Object.create === 'function') { + inherits = function inherits(ctor, superCtor) { + // implementation from standard node.js 'util' module + ctor.super_ = superCtor; + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + 
configurable: true + } + }); + }; +} else { + inherits = function inherits(ctor, superCtor) { + ctor.super_ = superCtor; + const TempCtor = function() {}; + TempCtor.prototype = superCtor.prototype; + ctor.prototype = new TempCtor(); + ctor.prototype.constructor = ctor; + }; +} +export default inherits; diff --git a/packages/node-builtins/src/es6/os.js b/packages/node-builtins/src/es6/os.js new file mode 100644 index 000000000..e0b035cf1 --- /dev/null +++ b/packages/node-builtins/src/es6/os.js @@ -0,0 +1,114 @@ +/* +The MIT License (MIT) + +Copyright (c) 2016 CoderPuppy + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +*/ +let _endianness; +export function endianness() { + if (typeof _endianness === 'undefined') { + const a = new ArrayBuffer(2); + const b = new Uint8Array(a); + const c = new Uint16Array(a); + b[0] = 1; + b[1] = 2; + if (c[0] === 258) { + _endianness = 'BE'; + } else if (c[0] === 513) { + _endianness = 'LE'; + } else { + throw new Error('unable to figure out endianess'); + } + } + return _endianness; +} + +export function hostname() { + if (typeof global.location !== 'undefined') { + return global.location.hostname; + } + return ''; +} + +export function loadavg() { + return []; +} + +export function uptime() { + return 0; +} + +export function freemem() { + return Number.MAX_VALUE; +} + +export function totalmem() { + return Number.MAX_VALUE; +} + +export function cpus() { + return []; +} + +export function type() { + return 'Browser'; +} + +export function release() { + if (typeof global.navigator !== 'undefined') { + return global.navigator.appVersion; + } + return ''; +} + +export function networkInterfaces() {} +export function getNetworkInterfaces() {} + +export function arch() { + return 'javascript'; +} + +export function platform() { + return 'browser'; +} + +export function tmpDir() { + return '/tmp'; +} +export var tmpdir = tmpDir; + +export var EOL = '\n'; +export default { + EOL, + tmpdir, + tmpDir, + networkInterfaces, + getNetworkInterfaces, + release, + type, + cpus, + totalmem, + freemem, + uptime, + loadavg, + hostname, + endianness +}; diff --git a/packages/node-builtins/src/es6/path.js b/packages/node-builtins/src/es6/path.js new file mode 100644 index 000000000..872f106fb --- /dev/null +++ b/packages/node-builtins/src/es6/path.js @@ -0,0 +1,237 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// resolves . and .. elements in a path array with directory names there +// must be no slashes, empty elements, or device names (c:\) in the array +// (so also no leading and trailing slashes - it does not distinguish +// relative and absolute paths) +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + let up = 0; + for (let i = parts.length - 1; i >= 0; i--) { + const last = parts[i]; + if (last === '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +} + +// Split a filename into [root, dir, basename, ext], unix version +// 'root' is just a slash, or nothing. +const splitPathRe = /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; +const splitPath = function(filename) { + return splitPathRe.exec(filename).slice(1); +}; + +// path.resolve([from ...], to) +// posix version +export function resolve() { + let resolvedPath = ''; + let resolvedAbsolute = false; + + for (let i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { + const path = i >= 0 ? arguments[i] : '/'; + + // Skip empty and invalid entries + if (typeof path !== 'string') { + throw new TypeError('Arguments to path.resolve must be strings'); + } else if (!path) { + continue; + } + + resolvedPath = `${path}/${resolvedPath}`; + resolvedAbsolute = path.charAt(0) === '/'; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeArray( + filter(resolvedPath.split('/'), (p) => !!p), + !resolvedAbsolute + ).join('/'); + + return (resolvedAbsolute ? '/' : '') + resolvedPath || '.'; +} + +// path.normalize(path) +// posix version +export function normalize(path) { + const isPathAbsolute = isAbsolute(path); + const trailingSlash = substr(path, -1) === '/'; + + // Normalize the path + path = normalizeArray( + filter(path.split('/'), (p) => !!p), + !isPathAbsolute + ).join('/'); + + if (!path && !isPathAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isPathAbsolute ? 
'/' : '') + path; +} + +// posix version +export function isAbsolute(path) { + return path.charAt(0) === '/'; +} + +// posix version +export function join() { + const paths = Array.prototype.slice.call(arguments, 0); + return normalize( + filter(paths, (p, index) => { + if (typeof p !== 'string') { + throw new TypeError('Arguments to path.join must be strings'); + } + return p; + }).join('/') + ); +} + +// path.relative(from, to) +// posix version +export function relative(from, to) { + from = resolve(from).substr(1); + to = resolve(to).substr(1); + + function trim(arr) { + let start = 0; + for (; start < arr.length; start++) { + if (arr[start] !== '') break; + } + + let end = arr.length - 1; + for (; end >= 0; end--) { + if (arr[end] !== '') break; + } + + if (start > end) return []; + return arr.slice(start, end - start + 1); + } + + const fromParts = trim(from.split('/')); + const toParts = trim(to.split('/')); + + const length = Math.min(fromParts.length, toParts.length); + let samePartsLength = length; + for (var i = 0; i < length; i++) { + if (fromParts[i] !== toParts[i]) { + samePartsLength = i; + break; + } + } + + let outputParts = []; + for (var i = samePartsLength; i < fromParts.length; i++) { + outputParts.push('..'); + } + + outputParts = outputParts.concat(toParts.slice(samePartsLength)); + + return outputParts.join('/'); +} + +export var sep = '/'; +export var delimiter = ':'; + +export function dirname(path) { + const result = splitPath(path); + const root = result[0]; + let dir = result[1]; + + if (!root && !dir) { + // No dirname whatsoever + return '.'; + } + + if (dir) { + // It has a dirname, strip trailing slash + dir = dir.substr(0, dir.length - 1); + } + + return root + dir; +} + +export function basename(path, ext) { + let f = splitPath(path)[2]; + // TODO: make this comparison case-insensitive on windows? + if (ext && f.substr(-1 * ext.length) === ext) { + f = f.substr(0, f.length - ext.length); + } + return f; +} + +export function extname(path) { + return splitPath(path)[3]; +} +export default { + extname, + basename, + dirname, + sep, + delimiter, + relative, + join, + isAbsolute, + normalize, + resolve +}; +function filter(xs, f) { + if (xs.filter) return xs.filter(f); + const res = []; + for (let i = 0; i < xs.length; i++) { + if (f(xs[i], i, xs)) res.push(xs[i]); + } + return res; +} + +// String.prototype.substr - negative index don't work in IE8 +var substr = + 'ab'.substr(-1) === 'b' + ? function(str, start, len) { + return str.substr(start, len); + } + : function(str, start, len) { + if (start < 0) start = str.length + start; + return str.substr(start, len); + }; diff --git a/packages/node-builtins/src/es6/punycode.js b/packages/node-builtins/src/es6/punycode.js new file mode 100644 index 000000000..43538edf6 --- /dev/null +++ b/packages/node-builtins/src/es6/punycode.js @@ -0,0 +1,463 @@ +/*! https://mths.be/punycode v1.4.1 by @mathias */ + +/** Highest positive signed 32-bit float value */ +const maxInt = 2147483647; // aka. 
0x7FFFFFFF or 2^31-1 + +/** Bootstring parameters */ +const base = 36; +const tMin = 1; +const tMax = 26; +const skew = 38; +const damp = 700; +const initialBias = 72; +const initialN = 128; // 0x80 +const delimiter = '-'; // '\x2D' + +/** Regular expressions */ +const regexPunycode = /^xn--/; +const regexNonASCII = /[^\x20-\x7E]/; // unprintable ASCII chars + non-ASCII chars +const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators + +/** Error messages */ +const errors = { + overflow: 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' +}; + +/** Convenience shortcuts */ +const baseMinusTMin = base - tMin; +const { floor } = Math; +const stringFromCharCode = String.fromCharCode; + +/*--------------------------------------------------------------------------*/ + +/** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ +function error(type) { + throw new RangeError(errors[type]); +} + +/** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ +function map(array, fn) { + let { length } = array; + const result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; +} + +/** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ +function mapDomain(string, fn) { + const parts = string.split('@'); + let result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = `${parts[0]}@`; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + const labels = string.split('.'); + const encoded = map(labels, fn).join('.'); + return result + encoded; +} + +/** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. 
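+ * For example (illustrative), ucs2decode('\uD834\uDF06') should return [0x1D306],
+ * the surrogate pair being combined into a single code point.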
+ */ +function ucs2decode(string) { + const output = []; + let counter = 0; + const { length } = string; + let value; + let extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xd800 && value <= 0xdbff && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xfc00) == 0xdc00) { + // low surrogate + output.push(((value & 0x3ff) << 10) + (extra & 0x3ff) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; +} + +/** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ +function ucs2encode(array) { + return map(array, (value) => { + let output = ''; + if (value > 0xffff) { + value -= 0x10000; + output += stringFromCharCode(((value >>> 10) & 0x3ff) | 0xd800); + value = 0xdc00 | (value & 0x3ff); + } + output += stringFromCharCode(value); + return output; + }).join(''); +} + +/** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ +function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; +} + +/** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ +function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); +} + +/** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ +function adapt(delta, numPoints, firstTime) { + let k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (; /* no initialization */ delta > (baseMinusTMin * tMax) >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + ((baseMinusTMin + 1) * delta) / (delta + skew)); +} + +/** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. 
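+ * For example (illustrative), decode('maana-pta') should return 'mañana'.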
+ */ +export function decode(input) { + // Don't use UCS-2 + const output = []; + const inputLength = input.length; + let out; + let i = 0; + let n = initialN; + let bias = initialBias; + let basic; + let j; + let index; + let oldi; + let w; + let k; + let digit; + let t; + /** Cached calculation results */ + let baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength /* no final expression */; ) { + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base /* no condition */; ; k += base) { + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias; + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + } + + return ucs2encode(output); +} + +/** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ +export function encode(input) { + let n; + let delta; + let handledCPCount; + let basicLength; + let bias; + let j; + let m; + let q; + let k; + let t; + let currentValue; + const output = []; + /** `inputLength` will hold the number of code points in `input`. */ + let inputLength; + /** Cached calculation results */ + let handledCPCountPlusOne; + let baseMinusT; + let qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. 
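+  // (Illustrative: for the input 'mañana', `output` now holds the basic code
+  // points 'maana'; the delimiter and the encoded deltas 'pta' are appended
+  // below, yielding 'maana-pta'.)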
+ + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base /* no condition */; ; k += base) { + t = k <= bias ? tMin : k >= bias + tMax ? tMax : k - bias; + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push(stringFromCharCode(digitToBasic(t + (qMinusT % baseMinusT), 0))); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + } + return output.join(''); +} + +/** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ +export function toUnicode(input) { + return mapDomain(input, (string) => + regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string + ); +} + +/** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ +export function toASCII(input) { + return mapDomain(input, (string) => + regexNonASCII.test(string) ? `xn--${encode(string)}` : string + ); +} +export var version = '1.4.1'; +/** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + +export var ucs2 = { + decode: ucs2decode, + encode: ucs2encode +}; +export default { + version, + ucs2, + toASCII, + toUnicode, + encode, + decode +}; diff --git a/packages/node-builtins/src/es6/qs.js b/packages/node-builtins/src/es6/qs.js new file mode 100644 index 000000000..189ab7f44 --- /dev/null +++ b/packages/node-builtins/src/es6/qs.js @@ -0,0 +1,150 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// If obj.hasOwnProperty has been overridden, then calling +// obj.hasOwnProperty(prop) will break. +// See: https://github.com/joyent/node/issues/1707 +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} +const isArray = + Array.isArray || + function(xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; + }; +function stringifyPrimitive(v) { + switch (typeof v) { + case 'string': + return v; + + case 'boolean': + return v ? 'true' : 'false'; + + case 'number': + return isFinite(v) ? v : ''; + + default: + return ''; + } +} + +export function stringify(obj, sep, eq, name) { + sep = sep || '&'; + eq = eq || '='; + if (obj === null) { + obj = undefined; + } + + if (typeof obj === 'object') { + return map(objectKeys(obj), (k) => { + const ks = encodeURIComponent(stringifyPrimitive(k)) + eq; + if (isArray(obj[k])) { + return map(obj[k], (v) => ks + encodeURIComponent(stringifyPrimitive(v))).join(sep); + } + return ks + encodeURIComponent(stringifyPrimitive(obj[k])); + }).join(sep); + } + + if (!name) return ''; + return ( + encodeURIComponent(stringifyPrimitive(name)) + eq + encodeURIComponent(stringifyPrimitive(obj)) + ); +} + +function map(xs, f) { + if (xs.map) return xs.map(f); + const res = []; + for (let i = 0; i < xs.length; i++) { + res.push(f(xs[i], i)); + } + return res; +} + +var objectKeys = + Object.keys || + function(obj) { + const res = []; + for (const key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key); + } + return res; + }; + +export function parse(qs, sep, eq, options) { + sep = sep || '&'; + eq = eq || '='; + const obj = {}; + + if (typeof qs !== 'string' || qs.length === 0) { + return obj; + } + + const regexp = /\+/g; + qs = qs.split(sep); + + let maxKeys = 1000; + if (options && typeof options.maxKeys === 'number') { + maxKeys = options.maxKeys; + } + + let len = qs.length; + // maxKeys <= 0 means that we should not limit keys count + if (maxKeys > 0 && len > maxKeys) { + len = maxKeys; + } + + for (let i = 0; i < len; ++i) { + const x = qs[i].replace(regexp, '%20'); + const idx = x.indexOf(eq); + var kstr; + var vstr; + var k; + var v; + + if (idx >= 0) { + kstr = x.substr(0, idx); + vstr = x.substr(idx + 1); + } else { + kstr = x; + vstr = ''; + } + + k = decodeURIComponent(kstr); + v = decodeURIComponent(vstr); + + if (!hasOwnProperty(obj, k)) { + obj[k] = v; + } else if (isArray(obj[k])) { + 
obj[k].push(v); + } else { + obj[k] = [obj[k], v]; + } + } + + return obj; +} +export default { + encode: stringify, + stringify, + decode: parse, + parse +}; +export { stringify as encode, parse as decode }; diff --git a/packages/node-builtins/src/es6/readable-stream/buffer-list.js b/packages/node-builtins/src/es6/readable-stream/buffer-list.js new file mode 100644 index 000000000..038b963a2 --- /dev/null +++ b/packages/node-builtins/src/es6/readable-stream/buffer-list.js @@ -0,0 +1,62 @@ +import { Buffer } from 'buffer'; + +export default BufferList; + +function BufferList() { + this.head = null; + this.tail = null; + this.length = 0; +} + +BufferList.prototype.push = function(v) { + const entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry; + else this.head = entry; + this.tail = entry; + ++this.length; +}; + +BufferList.prototype.unshift = function(v) { + const entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; +}; + +BufferList.prototype.shift = function() { + if (this.length === 0) return; + const ret = this.head.data; + if (this.length === 1) this.head = this.tail = null; + else this.head = this.head.next; + --this.length; + return ret; +}; + +BufferList.prototype.clear = function() { + this.head = this.tail = null; + this.length = 0; +}; + +BufferList.prototype.join = function(s) { + if (this.length === 0) return ''; + let p = this.head; + let ret = `${p.data}`; + while ((p = p.next)) { + ret += s + p.data; + } + return ret; +}; + +BufferList.prototype.concat = function(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + const ret = Buffer.allocUnsafe(n >>> 0); + let p = this.head; + let i = 0; + while (p) { + p.data.copy(ret, i); + i += p.data.length; + p = p.next; + } + return ret; +}; diff --git a/packages/node-builtins/src/es6/readable-stream/duplex.js b/packages/node-builtins/src/es6/readable-stream/duplex.js new file mode 100644 index 000000000..5b140a5c0 --- /dev/null +++ b/packages/node-builtins/src/es6/readable-stream/duplex.js @@ -0,0 +1,44 @@ +import { inherits } from 'util'; +import { nextTick } from 'process'; + +import { Readable } from './readable'; +import { Writable } from './writable'; + +inherits(Duplex, Readable); + +const keys = Object.keys(Writable.prototype); +for (let v = 0; v < keys.length; v++) { + const method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +} +export default Duplex; +export function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} diff --git a/packages/node-builtins/src/es6/readable-stream/passthrough.js b/packages/node-builtins/src/es6/readable-stream/passthrough.js new file mode 100644 index 000000000..c866a8f09 --- /dev/null +++ b/packages/node-builtins/src/es6/readable-stream/passthrough.js @@ -0,0 +1,15 @@ +import { inherits } from 'util'; + +import { Transform } from './transform'; + +inherits(PassThrough, Transform); +export default PassThrough; +export function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function(chunk, encoding, cb) { + cb(null, chunk); +}; diff --git a/packages/node-builtins/src/es6/readable-stream/readable.js b/packages/node-builtins/src/es6/readable-stream/readable.js new file mode 100644 index 000000000..896025128 --- /dev/null +++ b/packages/node-builtins/src/es6/readable-stream/readable.js @@ -0,0 +1,916 @@ +import EventEmitter from 'events'; +import { inherits, debuglog } from 'util'; +import { StringDecoder } from 'string_decoder'; +import { nextTick } from 'process'; + +import BufferList from './buffer-list'; +import { Duplex } from './duplex'; + +Readable.ReadableState = ReadableState; + +const debug = debuglog('stream'); +inherits(Readable, EventEmitter); + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') { + return emitter.prependListener(event, fn); + } + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn); + else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn); + else emitter._events[event] = [fn, emitter._events[event]]; +} +function listenerCount(emitter, type) { + return emitter.listeners(type).length; +} +function ReadableState(options, stream) { + options = options || {}; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + const hwm = options.highWaterMark; + const defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~~this.highWaterMark; + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. 
We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +export default Readable; +export function Readable(options) { + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options && typeof options.read === 'function') this._read = options.read; + + EventEmitter.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function(chunk, encoding) { + const state = this._readableState; + + if (!state.objectMode && typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function(chunk) { + const state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +Readable.prototype.isPaused = function() { + return this._readableState.flowing === false; +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + const er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || (chunk && chunk.length > 0)) { + if (state.ended && !addToFront) { + const e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + const _e = new Error('stream.unshift() after end event'); + stream.emit('error', _e); + } else { + let skipAdd; + if (state.decoder && !addToFront && !encoding) { + chunk = state.decoder.write(chunk); + skipAdd = !state.objectMode && chunk.length === 0; + } + + if (!addToFront) state.reading = false; + + // Don't add to the buffer if we've decoded to an empty string chunk and + // we're not in object mode + if (!skipAdd) { + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 
1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk); + else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return ( + !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0) + ); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function(enc) { + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +const MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || (state.length === 0 && state.ended)) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length; + return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function(n) { + debug('read', n); + n = parseInt(n, 10); + const state = this._readableState; + const nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this); + else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. 
+ // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + let doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + let ret; + if (n > 0) ret = fromList(n, state); + else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + let er = null; + if ( + !Buffer.isBuffer(chunk) && + typeof chunk !== 'string' && + chunk !== null && + chunk !== undefined && + !state.objectMode + ) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + const chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + const state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) nextTick(emitReadable_, stream); + else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. 
that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + let len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function(n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function(dest, pipeOpts) { + const src = this; + const state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + const doEnd = !pipeOpts || pipeOpts.end !== false; + + const endFn = doEnd ? onend : cleanup; + if (state.endEmitted) nextTick(endFn); + else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + const ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + let cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. 
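+  // (For context: `awaitDrain` counts piped destinations whose write() returned
+  // false; pipeOnDrain() below resumes flowing only once it drops back to zero.)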
+ let increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + const ret = dest.write(chunk); + if (ret === false && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ( + ((state.pipesCount === 1 && state.pipes === dest) || + (state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1)) && + !cleanedUp + ) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (listenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function() { + const state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && src.listeners('data').length) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function(dest) { + const state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + const dests = state.pipes; + const len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (let _i = 0; _i < len; _i++) { + dests[_i].emit('unpipe', this); + } + return this; + } + + // try to find the right one. 
+ const i = indexOf(state.pipes, dest); + if (i === -1) return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function(ev, fn) { + const res = EventEmitter.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + const state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function() { + const state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function() { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + const state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function(stream) { + const state = this._readableState; + let paused = false; + + const self = this; + stream.on('end', () => { + debug('wrapped end'); + if (state.decoder && !state.ended) { + const chunk = state.decoder.end(); + if (chunk && chunk.length) self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', (chunk) => { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return; + else if (!state.objectMode && (!chunk || !chunk.length)) return; + + const ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (const i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = (function(method) { + return function() { + return stream[method].apply(stream, arguments); + }; + })(i); + } + } + + // proxy certain important events. 
+ const events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, (ev) => { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function(n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + let ret; + if (state.objectMode) ret = state.buffer.shift(); + else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join(''); + else if (state.buffer.length === 1) ret = state.buffer.head.data; + else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + let ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + let p = list.head; + let c = 1; + let ret = p.data; + n -= ret.length; + while ((p = p.next)) { + const str = p.data; + const nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str; + else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next; + else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + const ret = Buffer.allocUnsafe(n); + let p = list.head; + let c = 1; + p.data.copy(ret); + n -= p.data.length; + while ((p = p.next)) { + const buf = p.data; + const nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next; + else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + const state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. 
+ if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function forEach(xs, f) { + for (let i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf(xs, x) { + for (let i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} diff --git a/packages/node-builtins/src/es6/readable-stream/transform.js b/packages/node-builtins/src/es6/readable-stream/transform.js new file mode 100644 index 000000000..a2d8b7ee3 --- /dev/null +++ b/packages/node-builtins/src/es6/readable-stream/transform.js @@ -0,0 +1,176 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. 
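+//
+// Editorial sketch (added for illustration; not part of the upstream
+// readable-stream source): a minimal use of the Transform exported below,
+// matching the back-pressure description above. The `source` and
+// `destination` streams are hypothetical placeholders.
+//
+//   import { Transform } from './transform';
+//
+//   const upper = new Transform({
+//     transform(chunk, encoding, cb) {
+//       // cb(err, data) both acknowledges the written chunk and, when
+//       // `data` is provided, pushes it to the readable side.
+//       cb(null, String(chunk).toUpperCase());
+//     }
+//   });
+//
+//   source.pipe(upper).pipe(destination);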
+ +import { inherits } from 'util'; + +import { Duplex } from './duplex'; + +inherits(Transform, Duplex); + +function TransformState(stream) { + this.afterTransform = function(er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; + this.writeencoding = null; +} + +function afterTransform(stream, er, data) { + const ts = stream._transformState; + ts.transforming = false; + + const cb = ts.writecb; + + if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (data !== null && data !== undefined) stream.push(data); + + cb(er); + + const rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} +export default Transform; +export function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(this); + + // when the writable side finishes, then flush out anything remaining. + const stream = this; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + this.once('prefinish', function() { + if (typeof this._flush === 'function') + this._flush((er) => { + done(stream, er); + }); + else done(stream); + }); +} + +Transform.prototype.push = function(chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function(chunk, encoding, cb) { + throw new Error('Not implemented'); +}; + +Transform.prototype._write = function(chunk, encoding, cb) { + const ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + const rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) + this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function(n) { + const ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; + +function done(stream, er) { + if (er) return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + const ws = stream._writableState; + const ts = stream._transformState; + + if (ws.length) throw new Error('Calling transform done when ws.length != 0'); + + if (ts.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} diff --git a/packages/node-builtins/src/es6/readable-stream/writable.js b/packages/node-builtins/src/es6/readable-stream/writable.js new file mode 100644 index 000000000..356ab1755 --- /dev/null +++ b/packages/node-builtins/src/es6/readable-stream/writable.js @@ -0,0 +1,525 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +import { inherits, deprecate } from 'util'; +import { Buffer } from 'buffer'; +import { EventEmitter } from 'events'; + +import { nextTick } from 'process'; + +import { Duplex } from './duplex'; + +Writable.WritableState = WritableState; +inherits(Writable, EventEmitter); + +function nop() {} + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +function WritableState(options, stream) { + Object.defineProperty(this, 'buffer', { + get: deprecate(function() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') + }); + options = options || {}; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + const hwm = options.highWaterMark; + const defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + const noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. 
We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function(er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function writableStateGetBuffer() { + let current = this.bufferedRequest; + const out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +export default Writable; +export function Writable(options) { + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + } + + EventEmitter.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function() { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + const er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + nextTick(cb, er); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. +function validChunk(stream, state, chunk, cb) { + let valid = true; + let er = false; + // Always throw error if a null is written + // if we are not in object mode then throw + // if it is not a buffer, string, or undefined. 
+ if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if ( + !Buffer.isBuffer(chunk) && + typeof chunk !== 'string' && + chunk !== undefined && + !state.objectMode + ) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function(chunk, encoding, cb) { + const state = this._writableState; + let ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb); + else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function() { + const state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function() { + const state = this._writableState; + + if (state.corked) { + state.corked--; + + if ( + !state.writing && + !state.corked && + !state.finished && + !state.bufferProcessing && + state.bufferedRequest + ) + clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if ( + !( + [ + 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + 'raw' + ].indexOf(`${encoding}`.toLowerCase()) > -1 + ) + ) + throw new TypeError(`Unknown encoding: ${encoding}`); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + const len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + const ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + const last = state.lastBufferedRequest; + state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite); + else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) nextTick(cb, er); + else cb(er); + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + const state = stream._writableState; + const { sync } = state; + const cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb); + else { + // Check if we're actually ready to finish, but don't emit yet + const finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /* */ + nextTick(afterWrite, stream, state, finished, cb); + /* */ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + let entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + const l = state.bufferedRequestCount; + const buffer = new Array(l); + const holder = state.corkedRequestsFree; + holder.entry = entry; + + let count = 0; + while (entry) { + buffer[count] = entry; + entry = entry.next; + count += 1; + } + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + } else { + // Slow case, write chunks one-by-one + while (entry) { + const { chunk } = entry; + const { encoding } = entry; + const cb = entry.callback; + const len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. 
+ // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequestCount = 0; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function(chunk, encoding, cb) { + cb(new Error('not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function(chunk, encoding, cb) { + const state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return ( + state.ending && + state.length === 0 && + state.bufferedRequest === null && + !state.finished && + !state.writing + ); +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + const need = needFinish(state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else { + prefinish(stream, state); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) nextTick(cb); + else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + const _this = this; + + this.next = null; + this.entry = null; + + this.finish = function(err) { + let { entry } = _this; + _this.entry = null; + while (entry) { + const cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = _this; + } else { + state.corkedRequestsFree = _this; + } + }; +} diff --git a/packages/node-builtins/src/es6/setimmediate.js b/packages/node-builtins/src/es6/setimmediate.js new file mode 100644 index 000000000..c3581c6ef --- /dev/null +++ b/packages/node-builtins/src/es6/setimmediate.js @@ -0,0 +1,185 @@ +/* +MIT Licence +Copyright (c) 2012 Barnesandnoble.com, llc, Donavon West, and Domenic Denicola +https://github.com/YuzuJS/setImmediate/blob/f1ccbfdf09cb93aadf77c4aa749ea554503b9234/LICENSE.txt +*/ + +let nextHandle = 1; // Spec says greater than zero +const tasksByHandle = {}; +let currentlyRunningATask = false; +const doc = global.document; +let registerImmediate; + +export function setImmediate(callback) { + // Callback can either be a function or a string + if (typeof callback !== 'function') { + callback = new Function(`${callback}`); + } + // Copy function arguments + const args = new Array(arguments.length - 1); + for (let i = 0; i < args.length; i++) { + args[i] = arguments[i + 1]; + } + // Store and register the task + const task = { callback, args }; + tasksByHandle[nextHandle] = task; + registerImmediate(nextHandle); + return nextHandle++; +} + +export function clearImmediate(handle) { + delete tasksByHandle[handle]; +} + +function run(task) { + const { 
callback } = task; + const { args } = task; + switch (args.length) { + case 0: + callback(); + break; + case 1: + callback(args[0]); + break; + case 2: + callback(args[0], args[1]); + break; + case 3: + callback(args[0], args[1], args[2]); + break; + default: + callback.apply(undefined, args); + break; + } +} + +function runIfPresent(handle) { + // From the spec: "Wait until any invocations of this algorithm started before this one have completed." + // So if we're currently running a task, we'll need to delay this invocation. + if (currentlyRunningATask) { + // Delay by doing a setTimeout. setImmediate was tried instead, but in Firefox 7 it generated a + // "too much recursion" error. + setTimeout(runIfPresent, 0, handle); + } else { + const task = tasksByHandle[handle]; + if (task) { + currentlyRunningATask = true; + try { + run(task); + } finally { + clearImmediate(handle); + currentlyRunningATask = false; + } + } + } +} + +function installNextTickImplementation() { + registerImmediate = function(handle) { + process.nextTick(() => { + runIfPresent(handle); + }); + }; +} + +function canUsePostMessage() { + // The test against `importScripts` prevents this implementation from being installed inside a web worker, + // where `global.postMessage` means something completely different and can't be used for this purpose. + if (global.postMessage && !global.importScripts) { + let postMessageIsAsynchronous = true; + const oldOnMessage = global.onmessage; + global.onmessage = function() { + postMessageIsAsynchronous = false; + }; + global.postMessage('', '*'); + global.onmessage = oldOnMessage; + return postMessageIsAsynchronous; + } +} + +function installPostMessageImplementation() { + // Installs an event handler on `global` for the `message` event: see + // * https://developer.mozilla.org/en/DOM/window.postMessage + // * http://www.whatwg.org/specs/web-apps/current-work/multipage/comms.html#crossDocumentMessages + + const messagePrefix = `setImmediate$${Math.random()}$`; + const onGlobalMessage = function(event) { + if ( + event.source === global && + typeof event.data === 'string' && + event.data.indexOf(messagePrefix) === 0 + ) { + runIfPresent(+event.data.slice(messagePrefix.length)); + } + }; + + if (global.addEventListener) { + global.addEventListener('message', onGlobalMessage, false); + } else { + global.attachEvent('onmessage', onGlobalMessage); + } + + registerImmediate = function(handle) { + global.postMessage(messagePrefix + handle, '*'); + }; +} + +function installMessageChannelImplementation() { + const channel = new MessageChannel(); + channel.port1.onmessage = function(event) { + const handle = event.data; + runIfPresent(handle); + }; + + registerImmediate = function(handle) { + channel.port2.postMessage(handle); + }; +} + +function installReadyStateChangeImplementation() { + const html = doc.documentElement; + registerImmediate = function(handle) { + // Create a