From eaa8dae80d5c460683dc09fc8804c2f661f9de9e Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 17 Oct 2019 12:33:20 +0100 Subject: [PATCH 01/13] feat: EXPERIMENTAL ipfsx API - boot procedure and add API method This PR allows ipfsx to be used by calling `IPFS.create(options)` with `{ EXPERIMENTAL: { ipfsx: true } }` options. It adds a single API method `add` that returns an iterator that yields objects of the form `{ cid, path, size }`. The iterator is decorated with a `first` and `last` function so users can conveniently `await` on the first or last item to be yielded as per the [proposal here](https://github.com/ipfs-shipyard/ipfsx/blob/master/API.md#add). In order to boot up a new ipfsx node I refactored the boot procedure to enable the following: 1. **Remove the big stateful blob "`self`" - components are passed just the dependencies they need to operate.** Right now it is opaque as to which components require which parts of an IPFS node without inspecting the entirety of the component's code. This change makes it easier to look at a component and know what aspects of the IPFS stack it uses and consequently allows us to understand which APIs should be available at which points of the node's lifecycle. It makes the code easier to understand, more maintainable and easier to mock dependencies for unit tests. 1. **Restrict APIs to appropriate lifecycle stage(s).** This PR introduces an `ApiManager` that allows us to update the API that is exposed at any given point. It allows us to (for example) disallow `ipfs.add` before the node is initialized or access `libp2p` before the node is started. The lifecycle methods `init`, `start` and `stop` each define which API methods are available after they have run avoiding having to put boilerplate in every method to check if it can be called when the node is in a particular state. See #1438 1. 
**Safer and more flexible API usage.** The `ApiManager` allows us to temporarily change APIs to stop `init` from being called again while it is already running and has the facility to rollback to the previous API state if an operation fails. It also enables piggybacking so we don't attempt 2 or more concurrent start/stop calls at once. See #1061 #2257 1. **Enable config changes at runtime.** Having an API that can be updated during a node's lifecycle will enable this feature in the future. **FEEDBACK REQUIRED**: The changes I've made here are a little...racy. They have a bunch of benefits, as I've outlined above but the `ApiManager` is implemented as a `Proxy`, allowing us to swap out the underlying API at will. How do y'all feel about that? Is there a better way or got a suggestion? resolves #1438 resolves #1061 resolves #2257 refs #2509 refs #1670 License: MIT Signed-off-by: Alan Shaw --- package.json | 1 + src/core/api-manager.js | 21 ++ src/core/components-ipfsx/add/index.js | 122 +++++++ src/core/components-ipfsx/add/utils.js | 87 +++++ src/core/components-ipfsx/index.js | 15 + src/core/components-ipfsx/init.js | 302 ++++++++++++++++++ src/core/components-ipfsx/start.js | 131 ++++++++ src/core/components-ipfsx/stop.js | 107 +++++++ .../files-regular/add-async-iterator.js | 159 +-------- src/core/components/files-regular/utils.js | 83 ----- src/core/components/pin.js | 2 +- src/core/errors.js | 19 ++ src/core/index.js | 8 +- src/core/ipfsx.js | 45 +++ src/core/preload.js | 4 +- src/core/runtime/init-assets-browser.js | 1 + src/core/runtime/init-assets-nodejs.js | 15 + src/core/utils.js | 22 ++ test/core/interface.spec.js | 7 + 19 files changed, 917 insertions(+), 234 deletions(-) create mode 100644 src/core/api-manager.js create mode 100644 src/core/components-ipfsx/add/index.js create mode 100644 src/core/components-ipfsx/add/utils.js create mode 100644 src/core/components-ipfsx/index.js create mode 100644 src/core/components-ipfsx/init.js create mode 100644 
src/core/components-ipfsx/start.js create mode 100644 src/core/components-ipfsx/stop.js create mode 100644 src/core/errors.js create mode 100644 src/core/ipfsx.js create mode 100644 src/core/runtime/init-assets-browser.js create mode 100644 src/core/runtime/init-assets-nodejs.js diff --git a/package.json b/package.json index 0300c5b8d1..6d9a098b16 100644 --- a/package.json +++ b/package.json @@ -16,6 +16,7 @@ "main": "src/core/index.js", "browser": { "./src/core/components/init-assets.js": false, + "./src/core/runtime/init-assets-nodejs.js": "./src/core/runtime/init-assets-browser.js", "./src/core/runtime/add-from-fs-nodejs.js": "./src/core/runtime/add-from-fs-browser.js", "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js", "./src/core/runtime/dns-nodejs.js": "./src/core/runtime/dns-browser.js", diff --git a/src/core/api-manager.js b/src/core/api-manager.js new file mode 100644 index 0000000000..8acc3c9f44 --- /dev/null +++ b/src/core/api-manager.js @@ -0,0 +1,21 @@ +module.exports = class ApiManager { + constructor () { + this._api = {} + this._onUndef = () => undefined + this.api = new Proxy({}, { + get (target, prop) { + return target[prop] === undefined + ? 
this._onUndef(prop) + : target[prop] + } + }) + } + + update (nextApi, onUndef) { + const prevApi = this._api + const prevUndef = this._onUndef + this._api = nextApi + if (onUndef) this._onUndef = onUndef + return { cancel: () => this.update(prevApi, prevUndef), api: this.api } + } +} diff --git a/src/core/components-ipfsx/add/index.js b/src/core/components-ipfsx/add/index.js new file mode 100644 index 0000000000..7ed62899bc --- /dev/null +++ b/src/core/components-ipfsx/add/index.js @@ -0,0 +1,122 @@ +'use strict' + +const importer = require('ipfs-unixfs-importer') +const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') +const { parseChunkerString } = require('./utils') +const pipe = require('it-pipe') +const { withFirstAndLast } = require('../../utils') + +module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { + return withFirstAndLast(async function * add (source, options) { + options = options || {} + + const opts = { + shardSplitThreshold: constructorOptions.EXPERIMENTAL.sharding ? 
1000 : Infinity, + ...options, + strategy: 'balanced', + ...parseChunkerString(options.chunker) + } + + // CID v0 is for multihashes encoded with sha2-256 + if (opts.hashAlg && opts.cidVersion !== 1) { + opts.cidVersion = 1 + } + + if (opts.trickle) { + opts.strategy = 'trickle' + } + + delete opts.trickle + + if (opts.progress) { + let total = 0 + const prog = opts.progress + + opts.progress = (bytes) => { + total += bytes + prog(total) + } + } + + const iterator = pipe( + normaliseAddInput(source), + source => importer(source, ipld, opts), + transformFile(dag, opts), + preloadFile(preload, opts), + pinFile(pin, opts) + ) + + const releaseLock = await gcLock.readLock() + + try { + yield * iterator + } finally { + releaseLock() + } + }) +} + +function transformFile (dag, opts) { + return async function * (source) { + for await (const { cid, path, unixfs } of source) { + if (opts.onlyHash) { + yield { + cid, + path: path || cid.toString(), + size: unixfs.fileSize() + } + + continue + } + + const node = await dag.get(cid, { ...opts, preload: false }) + + yield { + cid, + path: path || cid.toString(), + size: Buffer.isBuffer(node) ? node.length : node.size + } + } + } +} + +function preloadFile (preload, opts) { + return async function * (source) { + for await (const file of source) { + const isRootFile = !file.path || opts.wrapWithDirectory + ? file.path === '' + : !file.path.includes('/') + + const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + + if (shouldPreload) { + preload(file.hash) + } + + yield file + } + } +} + +function pinFile (pin, opts) { + return async function * (source) { + for await (const file of source) { + // Pin a file if it is the root dir of a recursive add or the single file + // of a direct add. + const pin = 'pin' in opts ? 
opts.pin : true + const isRootDir = !file.path.includes('/') + const shouldPin = pin && isRootDir && !opts.onlyHash + + if (shouldPin) { + // Note: addAsyncIterator() has already taken a GC lock, so tell + // pin.add() not to take a (second) GC lock + await pin.add(file.hash, { + preload: false, + lock: false + }) + } + + yield file + } + } +} diff --git a/src/core/components-ipfsx/add/utils.js b/src/core/components-ipfsx/add/utils.js new file mode 100644 index 0000000000..5c3ee6cc2a --- /dev/null +++ b/src/core/components-ipfsx/add/utils.js @@ -0,0 +1,87 @@ +'use strict' + +/** + * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-engine + * + * + * @param {String} chunker Chunker algorithm supported formats: + * "size-{size}" + * "rabin" + * "rabin-{avg}" + * "rabin-{min}-{avg}-{max}" + * + * @return {Object} Chunker options for DAGBuilder + */ +const parseChunkerString = (chunker) => { + if (!chunker) { + return { + chunker: 'fixed' + } + } else if (chunker.startsWith('size-')) { + const sizeStr = chunker.split('-')[1] + const size = parseInt(sizeStr) + if (isNaN(size)) { + throw new Error('Chunker parameter size must be an integer') + } + return { + chunker: 'fixed', + chunkerOptions: { + maxChunkSize: size + } + } + } else if (chunker.startsWith('rabin')) { + return { + chunker: 'rabin', + chunkerOptions: parseRabinString(chunker) + } + } else { + throw new Error(`Unrecognized chunker option: ${chunker}`) + } +} + +/** + * Parses rabin chunker string + * + * @param {String} chunker Chunker algorithm supported formats: + * "rabin" + * "rabin-{avg}" + * "rabin-{min}-{avg}-{max}" + * + * @return {Object} rabin chunker options + */ +const parseRabinString = (chunker) => { + const options = {} + const parts = chunker.split('-') + switch (parts.length) { + case 1: + options.avgChunkSize = 262144 + break + case 2: + options.avgChunkSize = parseChunkSize(parts[1], 'avg') + break + case 4: + options.minChunkSize = parseChunkSize(parts[1], 'min') + 
options.avgChunkSize = parseChunkSize(parts[2], 'avg') + options.maxChunkSize = parseChunkSize(parts[3], 'max') + break + default: + throw new Error('Incorrect chunker format (expected "rabin" "rabin-[avg]" or "rabin-[min]-[avg]-[max]"') + } + + return options +} + +const parseChunkSize = (str, name) => { + const size = parseInt(str) + if (isNaN(size)) { + throw new Error(`Chunker parameter ${name} must be an integer`) + } + + return size +} + +module.exports = { + parseChunkSize, + parseRabinString, + parseChunkerString +} diff --git a/src/core/components-ipfsx/index.js b/src/core/components-ipfsx/index.js new file mode 100644 index 0000000000..f9a2ee0109 --- /dev/null +++ b/src/core/components-ipfsx/index.js @@ -0,0 +1,15 @@ +'use strict' + +module.exports = { + add: require('./add'), + init: require('./init'), + start: require('./start'), + stop: require('./stop'), + legacy: { + config: require('../components/config'), + dag: require('../components/dag'), + libp2p: require('../components/libp2p'), + object: require('../components/object'), + pin: require('../components/pin') + } +} diff --git a/src/core/components-ipfsx/init.js b/src/core/components-ipfsx/init.js new file mode 100644 index 0000000000..5e5acda3b5 --- /dev/null +++ b/src/core/components-ipfsx/init.js @@ -0,0 +1,302 @@ +'use strict' + +const log = require('debug')('ipfs:components:init') +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') +const mergeOptions = require('merge-options') +const promisify = require('promisify-es6') +const getDefaultConfig = require('../runtime/config-nodejs.js') +const createRepo = require('../runtime/repo-nodejs') +const Keychain = require('libp2p-keychain') +const NoKeychain = require('../components/no-keychain') +const GCLock = require('../components/pin/gc-lock') +const { DAGNode } = require('ipld-dag-pb') +const UnixFs = require('ipfs-unixfs') +const multicodec = require('multicodec') +const multiaddr = require('multiaddr') +const { + 
ERR_ALREADY_INITIALIZING, + ERR_ALREADY_INITIALIZED, + ERR_NOT_STARTED +} = require('../../errors') +const BlockService = require('ipfs-block-service') +const Ipld = require('ipld') +const getDefaultIpldOptions = require('../runtime/ipld-nodejs') +const createPreloader = require('./preload') +const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors +const IPNS = require('../ipns') +const OfflineDatastore = require('../ipns/routing/offline-datastore') +const initAssets = require('../runtime/init-assets-nodejs') +const Components = require('.') +const PinManager = require('../components/pin/pin-manager') + +module.exports = ({ + apiManager, + print, + constructorOptions +}) => async function init (options) { + const { cancel } = apiManager.update({ init: ERR_ALREADY_INITIALIZING }) + + try { + options = mergeOptions({}, options, constructorOptions.init) + + if (constructorOptions.pass) { + options.pass = constructorOptions.pass + } + + if (constructorOptions.config) { + options.config = constructorOptions.config + } + + const repo = typeof options.repo === 'string' || options.repo == null + ? createRepo(options.repo) + : options.repo + + let isInitialized = true + + if (repo.closed) { + try { + await repo.open() + } catch (err) { + if (err.code === ERR_REPO_NOT_INITIALIZED) { + isInitialized = false + } else { + throw err + } + } + } + + const { peerId, config, keychain } = isInitialized + ? 
await initExistingRepo(repo, options) + : await initNewRepo(repo, options) + + log('peer created') + const peerInfo = new PeerInfo(peerId) + + if (config.Addresses && config.Addresses.Swarm) { + config.Addresses.Swarm.forEach(addr => { + let ma = multiaddr(addr) + + if (ma.getPeerId()) { + ma = ma.encapsulate(`/p2p/${peerInfo.id.toB58String()}`) + } + + peerInfo.multiaddrs.add(ma) + }) + } + + const blockService = new BlockService(repo) + const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) + + const preload = createPreloader(constructorOptions.preload) + await preload.start() + + const gcLock = new GCLock(constructorOptions.repoOwner, { + // Make sure GCLock is specific to repo, for tests where there are + // multiple instances of IPFS + morticeId: repo.path + }) + + const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = Components.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + + const pinManager = new PinManager(repo, dag) + await pinManager.load() + + const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + if (!isInitialized && !options.emptyRepo) { + // add empty unixfs dir object (go-ipfs assumes this exists) + const emptyDirCid = await addEmptyDir({ dag }) + + log('adding default assets') + await initAssets({ add, print }) + + log('initializing IPNS keyspace') + // Setup the offline routing for IPNS. + // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. 
+ const offlineDatastore = new OfflineDatastore(repo) + const ipns = new IPNS(offlineDatastore, repo.datastore, peerInfo, keychain, { pass: options.pass }) + await ipns.initializeKeyspace(peerId.privKey.bytes, emptyDirCid.toString()) + } + + const api = createApi({ + add, + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions: options, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, ERR_NOT_STARTED) + } catch (err) { + cancel() + throw err + } + + return apiManager.api +} + +async function initNewRepo (repo, { privateKey, emptyRepo, bits, profiles, config, pass, print }) { + emptyRepo = emptyRepo || false + bits = bits == null ? 2048 : Number(bits) + + config = mergeOptions(getDefaultConfig(), config) + config = applyProfiles(profiles, config) + + // Verify repo does not exist yet + const exists = await repo.exists() + log('repo exists?', exists) + + if (exists === true) { + throw new Error('repo already exists') + } + + const peerId = await createPeerId({ privateKey, bits, print }) + let keychain = new NoKeychain() + + log('identity generated') + + config.Identity = { + PeerID: peerId.toB58String(), + PrivKey: peerId.privKey.bytes.toString('base64') + } + + privateKey = peerId.privKey + + config.Keychain = Keychain.generateOptions() + + log('peer identity: %s', config.Identity.PeerID) + + await repo.init(config) + await repo.open() + + log('repo opened') + + if (pass) { + log('creating keychain') + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + await keychain.importPeer('self', { privKey: privateKey }) + } + + return { peerId, keychain, config } +} + +async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { + let config = await repo.config.get() + + if (newConfig || profiles) { + if (newConfig) { + config = mergeOptions(config, newConfig) + } + if (profiles) { + config = 
applyProfiles(profiles, config) + } + await repo.config.set(config) + } + + let keychain = new NoKeychain() + + if (pass) { + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + log('keychain constructed') + } + + const peerId = await promisify(PeerId.createFromPrivKey)(config.Identity.PrivKey) + + // Import the private key as 'self', if needed. + if (pass) { + try { + await keychain.findKeyByName('self') + } catch (err) { + log('Creating "self" key') + await keychain.importPeer('self', peerId) + } + } + + return { peerId, keychain, config } +} + +function createPeerId ({ privateKey, bits, print }) { + if (privateKey) { + log('using user-supplied private-key') + return typeof privateKey === 'object' + ? privateKey + : promisify(PeerId.createFromPrivKey)(Buffer.from(privateKey, 'base64')) + } else { + // Generate peer identity keypair + transform to desired format + add to config. + print('generating %s-bit RSA keypair...', bits) + return promisify(PeerId.create)({ bits }) + } +} + +async function addEmptyDir ({ dag }) { + const node = new DAGNode(new UnixFs('directory').marshal()) + return dag.put(node, { + version: 0, + format: multicodec.DAG_PB, + hashAlg: multicodec.SHA2_256 + }) +} + +// Apply profiles (e.g. 
['server', 'lowpower']) to config +function applyProfiles (profiles, config) { + return (profiles || []).reduce((name, config) => { + const profile = Components.legacy.config.profiles[name] + if (!profile) { + throw new Error(`No profile with name '${name}'`) + } + log('applying profile %s', name) + return profile.transform(config) + }) +} + +function createApi ({ + add, + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const start = Components.start({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + const api = { + add, + init: ERR_ALREADY_INITIALIZED, + start + } + + return api +} diff --git a/src/core/components-ipfsx/start.js b/src/core/components-ipfsx/start.js new file mode 100644 index 0000000000..53a3714b64 --- /dev/null +++ b/src/core/components-ipfsx/start.js @@ -0,0 +1,131 @@ +'use strict' + +const Bitswap = require('ipfs-bitswap') +const PeerBook = require('peer-book') +const IPNS = require('../ipns') +const routingConfig = require('../ipns/routing/config') +const defer = require('p-defer') +const { ERR_ALREADY_INITIALIZED } = require('../../errors') + +const Components = require('.') + +module.exports = ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function start () { + const startPromise = defer() + const { cancel } = apiManager.update({ start: () => startPromise.promise }) + + try { + // The repo may be closed if previously stopped + if (repo.closed) { + await repo.open() + } + + const config = await repo.config.get() + + const peerBook = new PeerBook() + const libp2p = Components.legacy.libp2p({ + _options: constructorOptions, + _repo: repo, + _peerInfo: peerInfo, + _peerInfoBook: peerBook + }, config) + + await 
libp2p.start() + + const ipnsRouting = routingConfig({ + _options: constructorOptions, + libp2p, + _repo: repo, + _peerInfo: peerInfo + }) + const ipns = new IPNS(ipnsRouting, repo.datastore, peerInfo, keychain, { pass: initOptions.pass }) + const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) + + await bitswap.start() + + blockService.setExchange(bitswap) + + await preload.start() + await ipns.republisher.start() + // TODO: start mfs preload here + + const api = createApi({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => undefined) + } catch (err) { + cancel() + startPromise.reject(err) + throw err + } + + startPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = Components.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + const stop = Components.stop({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + preload, + print, + repo + }) + + const api = { + add, + init: ERR_ALREADY_INITIALIZED, + start: () => apiManager.api, + stop + } + + return api +} diff --git a/src/core/components-ipfsx/stop.js b/src/core/components-ipfsx/stop.js new file mode 100644 index 0000000000..cc6455f24d --- /dev/null +++ b/src/core/components-ipfsx/stop.js @@ -0,0 +1,107 @@ +'use strict' + +const defer = require('p-defer') +const Components = require('.') +const { 
ERR_NOT_STARTED, ERR_ALREADY_INITIALIZED } = require('../../errors') + +module.exports = ({ + apiManager, + constructorOptions, + bitswap, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function stop () { + const stopPromise = defer() + const { cancel } = apiManager.update({ stop: () => stopPromise.promise }) + + try { + blockService.unsetExchange() + bitswap.stop() + preload.stop() + + await Promise.all([ + ipns.republisher.stop(), + // mfsPreload.stop(), + libp2p.stop(), + repo.close() + ]) + + const api = createApi({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, ERR_NOT_STARTED) + } catch (err) { + cancel() + stopPromise.reject(err) + throw err + } + + stopPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = Components.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + const start = Components.start({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + const api = { + add, + init: ERR_ALREADY_INITIALIZED, + start, + stop: () => apiManager.api + } + + return api +} diff --git a/src/core/components/files-regular/add-async-iterator.js b/src/core/components/files-regular/add-async-iterator.js index e138a1cd66..f69d7268f6 100644 --- 
a/src/core/components/files-regular/add-async-iterator.js +++ b/src/core/components/files-regular/add-async-iterator.js @@ -1,155 +1,24 @@ 'use strict' -const importer = require('ipfs-unixfs-importer') -const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') -const { parseChunkerString } = require('./utils') -const pipe = require('it-pipe') -const log = require('debug')('ipfs:add') -log.error = require('debug')('ipfs:add:error') - -function noop () {} +const createAdd = require('../../components-ipfsx/add') module.exports = function (self) { - // Internal add func that gets used by all add funcs - return async function * addAsyncIterator (source, options) { - options = options || {} - - const chunkerOptions = parseChunkerString(options.chunker) - - const opts = Object.assign({}, { - shardSplitThreshold: self._options.EXPERIMENTAL.sharding - ? 1000 - : Infinity - }, options, { - strategy: 'balanced', - chunker: chunkerOptions.chunker, - chunkerOptions: chunkerOptions.chunkerOptions - }) - - // CID v0 is for multihashes encoded with sha2-256 - if (opts.hashAlg && opts.cidVersion !== 1) { - opts.cidVersion = 1 - } - - if (opts.trickle) { - opts.strategy = 'trickle' - } - - delete opts.trickle - - let total = 0 - - const prog = opts.progress || noop - const progress = (bytes) => { - total += bytes - prog(total) - } - - opts.progress = progress - - const iterator = pipe( - normaliseAddInput(source), - doImport(self, opts), - transformFile(self, opts), - preloadFile(self, opts), - pinFile(self, opts) - ) - - const releaseLock = await self._gcLock.readLock() - - try { - yield * iterator - } finally { - releaseLock() - } - } -} - -function doImport (ipfs, opts) { - return async function * (source) { // eslint-disable-line require-await - yield * importer(source, ipfs._ipld, opts) - } -} - -function transformFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - let cid = file.cid - const hash = 
cid.toBaseEncodedString() - let path = file.path ? file.path : hash - - if (opts.wrapWithDirectory && !file.path) { - path = '' - } - - if (opts.onlyHash) { - yield { - path, - hash, - size: file.unixfs.fileSize() - } - - return - } - - const node = await ipfs.object.get(file.cid, Object.assign({}, opts, { preload: false })) - - if (opts.cidVersion === 1) { - cid = cid.toV1() - } - - let size = node.size - - if (Buffer.isBuffer(node)) { - size = node.length - } - - yield { - path, - hash, - size - } - } - } -} - -function preloadFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - const isRootFile = !file.path || opts.wrapWithDirectory - ? file.path === '' - : !file.path.includes('/') + const { + _ipld: ipld, + dag, + _gcLock: gcLock, + _preload: preload, + pin, + _options: config + } = self - const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + const add = createAdd({ ipld, dag, gcLock, preload, pin, config }) - if (shouldPreload) { - ipfs._preload(file.hash) - } - - yield file - } - } -} - -function pinFile (ipfs, opts) { - return async function * (source) { - for await (const file of source) { - // Pin a file if it is the root dir of a recursive add or the single file - // of a direct add. - const pin = 'pin' in opts ? 
opts.pin : true - const isRootDir = !file.path.includes('/') - const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg - - if (shouldPin) { - // Note: addAsyncIterator() has already taken a GC lock, so tell - // pin.add() not to take a (second) GC lock - await ipfs.pin.add(file.hash, { - preload: false, - lock: false - }) - } + return async function * addAsyncIterator (source, options) { + options = options || {} - yield file + for await (const file of add(source, options)) { + yield { hash: file.cid.toString(), ...file } } } } diff --git a/src/core/components/files-regular/utils.js b/src/core/components/files-regular/utils.js index 876d0b0d48..0a5c8a57ba 100644 --- a/src/core/components/files-regular/utils.js +++ b/src/core/components/files-regular/utils.js @@ -20,86 +20,6 @@ const normalizePath = (path) => { return path } -/** - * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-engine - * - * - * @param {String} chunker Chunker algorithm supported formats: - * "size-{size}" - * "rabin" - * "rabin-{avg}" - * "rabin-{min}-{avg}-{max}" - * - * @return {Object} Chunker options for DAGBuilder - */ -const parseChunkerString = (chunker) => { - if (!chunker) { - return { - chunker: 'fixed' - } - } else if (chunker.startsWith('size-')) { - const sizeStr = chunker.split('-')[1] - const size = parseInt(sizeStr) - if (isNaN(size)) { - throw new Error('Chunker parameter size must be an integer') - } - return { - chunker: 'fixed', - chunkerOptions: { - maxChunkSize: size - } - } - } else if (chunker.startsWith('rabin')) { - return { - chunker: 'rabin', - chunkerOptions: parseRabinString(chunker) - } - } else { - throw new Error(`Unrecognized chunker option: ${chunker}`) - } -} - -/** - * Parses rabin chunker string - * - * @param {String} chunker Chunker algorithm supported formats: - * "rabin" - * "rabin-{avg}" - * "rabin-{min}-{avg}-{max}" - * - * @return {Object} rabin chunker options - */ -const parseRabinString = (chunker) => { - const 
options = {} - const parts = chunker.split('-') - switch (parts.length) { - case 1: - options.avgChunkSize = 262144 - break - case 2: - options.avgChunkSize = parseChunkSize(parts[1], 'avg') - break - case 4: - options.minChunkSize = parseChunkSize(parts[1], 'min') - options.avgChunkSize = parseChunkSize(parts[2], 'avg') - options.maxChunkSize = parseChunkSize(parts[3], 'max') - break - default: - throw new Error('Incorrect chunker format (expected "rabin" "rabin-[avg]" or "rabin-[min]-[avg]-[max]"') - } - - return options -} - -const parseChunkSize = (str, name) => { - const size = parseInt(str) - if (isNaN(size)) { - throw new Error(`Chunker parameter ${name} must be an integer`) - } - - return size -} - const mapFile = (file, options) => { options = options || {} @@ -129,8 +49,5 @@ const mapFile = (file, options) => { module.exports = { normalizePath, - parseChunkSize, - parseRabinString, - parseChunkerString, mapFile } diff --git a/src/core/components/pin.js b/src/core/components/pin.js index cbe0c8a250..176e5f5cc8 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -10,7 +10,7 @@ const PinTypes = PinManager.PinTypes module.exports = (self) => { const dag = self.dag - const pinManager = new PinManager(self._repo, dag) + const pinManager = self._pinManager || new PinManager(self._repo, dag) const pin = { add: callbackify.variadic(async (paths, options) => { diff --git a/src/core/errors.js b/src/core/errors.js new file mode 100644 index 0000000000..b938eff2d6 --- /dev/null +++ b/src/core/errors.js @@ -0,0 +1,19 @@ +const errCode = require('err-code') + +exports.ERR_NOT_INITIALIZED = () => { + throw errCode(new Error('not initialized'), 'ERR_NOT_INITIALIZED') +} + +exports.ERR_ALREADY_INITIALIZING = () => { + const msg = 'cannot initialize an initializing node' + throw errCode(new Error(msg), 'ERR_ALREADY_INITIALIZING') +} + +exports.ERR_ALREADY_INITIALIZED = () => { + const msg = 'cannot re-initialize an initialized node' + throw 
errCode(new Error(msg), 'ERR_ALREADY_INITIALIZED') +} + +exports.ERR_NOT_STARTED = () => { + throw errCode(new Error('not started'), 'ERR_NOT_STARTED') +} diff --git a/src/core/index.js b/src/core/index.js index a5ad33edf9..17a3ff99de 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -28,14 +28,13 @@ const preload = require('./preload') const mfsPreload = require('./mfs-preload') const ipldOptions = require('./runtime/ipld-nodejs') const { isTest } = require('ipfs-utils/src/env') +const ipfsx = require('./ipfsx') /** * @typedef { import("./ipns/index") } IPNS */ /** - * - * * @class IPFS * @extends {EventEmitter} */ @@ -86,7 +85,7 @@ class IPFS extends EventEmitter { this._bitswap = undefined this._blockService = new BlockService(this._repo) this._ipld = new Ipld(ipldOptions(this._blockService, this._options.ipld, this.log)) - this._preload = preload(this) + this._preload = preload(this._options.preload) this._mfsPreload = mfsPreload(this) /** @type {IPNS} */ this._ipns = undefined @@ -177,5 +176,8 @@ module.exports.createNode = (options) => { } module.exports.create = (options) => { + if (options && options.EXPERIMENTAL && options.EXPERIMENTAL.ipfsx) { + return ipfsx(options) + } return new IPFS(options).ready } diff --git a/src/core/ipfsx.js b/src/core/ipfsx.js new file mode 100644 index 0000000000..25d7352457 --- /dev/null +++ b/src/core/ipfsx.js @@ -0,0 +1,45 @@ +'use strict' + +const log = require('debug')('ipfs') +const mergeOptions = require('merge-options') +const { isTest } = require('ipfs-utils/src/env') +const { ERR_NOT_INITIALIZED } = require('./errors') +const { validate } = require('./config') +const Components = require('./components-ipfsx') +const ApiManager = require('./api-manager') + +const getDefaultOptions = () => ({ + init: true, + start: true, + EXPERIMENTAL: {}, + preload: { + enabled: !isTest, // preload by default, unless in test env + addresses: [ + '/dns4/node0.preload.ipfs.io/https', + '/dns4/node1.preload.ipfs.io/https' + ] + } 
+}) + +module.exports = async options => { + options = mergeOptions(getDefaultOptions(), validate(options || {})) + + // eslint-disable-next-line no-console + const print = options.silent ? log : console.log + + const apiManager = new ApiManager() + const init = Components.init({ apiManager, print, constructorOptions: options }) + const { api } = apiManager.update({ init }, ERR_NOT_INITIALIZED) + + if (!options.init) { + return api + } + + await api.init() + + if (!options.start) { + return api + } + + return api.start() +} diff --git a/src/core/preload.js b/src/core/preload.js index 5427a2ecd0..053103c648 100644 --- a/src/core/preload.js +++ b/src/core/preload.js @@ -10,8 +10,8 @@ const preload = require('./runtime/preload-nodejs') const log = debug('ipfs:preload') log.error = debug('ipfs:preload:error') -module.exports = self => { - const options = self._options.preload || {} +module.exports = options => { + options = options || {} options.enabled = Boolean(options.enabled) options.addresses = options.addresses || [] diff --git a/src/core/runtime/init-assets-browser.js b/src/core/runtime/init-assets-browser.js new file mode 100644 index 0000000000..0c0c42d5b5 --- /dev/null +++ b/src/core/runtime/init-assets-browser.js @@ -0,0 +1 @@ +module.exports = () => {} diff --git a/src/core/runtime/init-assets-nodejs.js b/src/core/runtime/init-assets-nodejs.js new file mode 100644 index 0000000000..d8c4665f42 --- /dev/null +++ b/src/core/runtime/init-assets-nodejs.js @@ -0,0 +1,15 @@ +'use strict' + +const path = require('path') +const globSource = require('ipfs-utils/src/files/glob-source') +const all = require('async-iterator-all') + +// Add the default assets to the repo. 
+module.exports = async function initAssets ({ add, print }) { + const initDocsPath = path.join(__dirname, '..', '..', 'init-files', 'init-docs') + const results = await all(add(globSource(initDocsPath))) + const dir = results.filter(file => file.path === 'init-docs').pop() + + print('to get started, enter:\n') + print(`\tjsipfs cat /ipfs/${dir.cid}/readme\n`) +} diff --git a/src/core/utils.js b/src/core/utils.js index 8373797dde..2b852410da 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -127,3 +127,25 @@ const resolvePath = async function (objectAPI, ipfsPaths) { exports.normalizePath = normalizePath exports.parseIpfsPath = parseIpfsPath exports.resolvePath = resolvePath + +exports.withFirstAndLast = fn => { + return (...args) => { + const it = fn(...args) + return { + [Symbol.asyncIterator] () { + return it[Symbol.asyncIterator]() + }, + async first () { + const { value } = await it.next() + return value + }, + async last () { + let last + for await (const value of it) { + last = value + } + return last + } + } + } +} diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index bfc0cb6508..0b882fdf01 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -51,6 +51,13 @@ describe('interface-ipfs-core tests', function () { }] }) + tests.filesRegular.ipfsx(CommonFactory.create({ + spawnOptions: { + initOptions: { bits: 512, profile: 'test' }, + EXPERIMENTAL: { ipfsx: true } + } + })) + tests.filesMFS(defaultCommonFactory) tests.key(CommonFactory.createAsync({ From 8deed6a89d76db657b117a6b89d1c368a421141f Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 6 Dec 2019 21:11:48 +0000 Subject: [PATCH 02/13] refactor: standard errors and remove first/last utils --- src/core/components-ipfsx/add/index.js | 5 ++- src/core/components-ipfsx/init.js | 12 +++---- src/core/components-ipfsx/stop.js | 6 ++-- src/core/errors.js | 48 +++++++++++++++++++------- src/core/utils.js | 22 ------------ 5 files changed, 47 insertions(+), 46 
deletions(-) diff --git a/src/core/components-ipfsx/add/index.js b/src/core/components-ipfsx/add/index.js index 7ed62899bc..bcd53fd387 100644 --- a/src/core/components-ipfsx/add/index.js +++ b/src/core/components-ipfsx/add/index.js @@ -4,10 +4,9 @@ const importer = require('ipfs-unixfs-importer') const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') const { parseChunkerString } = require('./utils') const pipe = require('it-pipe') -const { withFirstAndLast } = require('../../utils') module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { - return withFirstAndLast(async function * add (source, options) { + return async function * add (source, options) { options = options || {} const opts = { @@ -53,7 +52,7 @@ module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { } finally { releaseLock() } - }) + } } function transformFile (dag, opts) { diff --git a/src/core/components-ipfsx/init.js b/src/core/components-ipfsx/init.js index 5e5acda3b5..15b64e7b93 100644 --- a/src/core/components-ipfsx/init.js +++ b/src/core/components-ipfsx/init.js @@ -15,9 +15,9 @@ const UnixFs = require('ipfs-unixfs') const multicodec = require('multicodec') const multiaddr = require('multiaddr') const { - ERR_ALREADY_INITIALIZING, - ERR_ALREADY_INITIALIZED, - ERR_NOT_STARTED + AlreadyInitializingError, + AlreadyInitializedError, + NotStartedError } = require('../../errors') const BlockService = require('ipfs-block-service') const Ipld = require('ipld') @@ -35,7 +35,7 @@ module.exports = ({ print, constructorOptions }) => async function init (options) { - const { cancel } = apiManager.update({ init: ERR_ALREADY_INITIALIZING }) + const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) try { options = mergeOptions({}, options, constructorOptions.init) @@ -137,7 +137,7 @@ module.exports = ({ repo }) - apiManager.update(api, ERR_NOT_STARTED) + apiManager.update(api, () => { throw new 
NotStartedError() }) } catch (err) { cancel() throw err @@ -294,7 +294,7 @@ function createApi ({ const api = { add, - init: ERR_ALREADY_INITIALIZED, + init: () => { throw new AlreadyInitializedError() }, start } diff --git a/src/core/components-ipfsx/stop.js b/src/core/components-ipfsx/stop.js index cc6455f24d..8d5ae94624 100644 --- a/src/core/components-ipfsx/stop.js +++ b/src/core/components-ipfsx/stop.js @@ -2,7 +2,7 @@ const defer = require('p-defer') const Components = require('.') -const { ERR_NOT_STARTED, ERR_ALREADY_INITIALIZED } = require('../../errors') +const { NotStartedError, AlreadyInitializedError } = require('../../errors') module.exports = ({ apiManager, @@ -51,7 +51,7 @@ module.exports = ({ repo }) - apiManager.update(api, ERR_NOT_STARTED) + apiManager.update(api, () => { throw new NotStartedError() }) } catch (err) { cancel() stopPromise.reject(err) @@ -98,7 +98,7 @@ function createApi ({ const api = { add, - init: ERR_ALREADY_INITIALIZED, + init: () => { throw new AlreadyInitializedError() }, start, stop: () => apiManager.api } diff --git a/src/core/errors.js b/src/core/errors.js index b938eff2d6..a2ab2542a5 100644 --- a/src/core/errors.js +++ b/src/core/errors.js @@ -1,19 +1,43 @@ -const errCode = require('err-code') - -exports.ERR_NOT_INITIALIZED = () => { - throw errCode(new Error('not initialized'), 'ERR_NOT_INITIALIZED') +class NotInitializedError extends Error { + constructor (message = 'not initialized') { + super(message) + this.name = 'NotInitializedError' + this.code = NotInitializedError.code + } } -exports.ERR_ALREADY_INITIALIZING = () => { - const msg = 'cannot initialize an initializing node' - throw errCode(new Error(msg), 'ERR_ALREADY_INITIALIZING') +NotInitializedError.code = 'ERR_NOT_INITIALIZED' +exports.NotInitializedError = NotInitializedError + +class AlreadyInitializingError extends Error { + constructor (message = 'cannot initialize an initializing node') { + super(message) + this.name = 'AlreadyInitializingError' + 
this.code = AlreadyInitializingError.code + } } -exports.ERR_ALREADY_INITIALIZED = () => { - const msg = 'cannot re-initialize an initialized node' - throw errCode(new Error(msg), 'ERR_ALREADY_INITIALIZED') +AlreadyInitializingError.code = 'ERR_ALREADY_INITIALIZING' +exports.AlreadyInitializingError = AlreadyInitializingError + +class AlreadyInitializedError extends Error { + constructor (message = 'cannot re-initialize an initialized node') { + super(message) + this.name = 'AlreadyInitializedError' + this.code = AlreadyInitializedError.code + } } -exports.ERR_NOT_STARTED = () => { - throw errCode(new Error('not started'), 'ERR_NOT_STARTED') +AlreadyInitializedError.code = 'ERR_ALREADY_INITIALIZED' +exports.AlreadyInitializedError = AlreadyInitializedError + +class NotStartedError extends Error { + constructor (message = 'not started') { + super(message) + this.name = 'NotStartedError' + this.code = NotStartedError.code + } } + +NotStartedError.code = 'ERR_NOT_STARTED' +exports.NotStartedError = NotStartedError diff --git a/src/core/utils.js b/src/core/utils.js index 2b852410da..8373797dde 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -127,25 +127,3 @@ const resolvePath = async function (objectAPI, ipfsPaths) { exports.normalizePath = normalizePath exports.parseIpfsPath = parseIpfsPath exports.resolvePath = resolvePath - -exports.withFirstAndLast = fn => { - return (...args) => { - const it = fn(...args) - return { - [Symbol.asyncIterator] () { - return it[Symbol.asyncIterator]() - }, - async first () { - const { value } = await it.next() - return value - }, - async last () { - let last - for await (const value of it) { - last = value - } - return last - } - } - } -} From 4f9ba62647b46751355196fa7cef8c7efe982974 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 6 Dec 2019 21:17:15 +0000 Subject: [PATCH 03/13] fix: use standard errors --- src/core/ipfsx.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/core/ipfsx.js 
b/src/core/ipfsx.js index 25d7352457..585a0a135d 100644 --- a/src/core/ipfsx.js +++ b/src/core/ipfsx.js @@ -3,7 +3,7 @@ const log = require('debug')('ipfs') const mergeOptions = require('merge-options') const { isTest } = require('ipfs-utils/src/env') -const { ERR_NOT_INITIALIZED } = require('./errors') +const { NotInitializedError } = require('./errors') const { validate } = require('./config') const Components = require('./components-ipfsx') const ApiManager = require('./api-manager') @@ -29,7 +29,7 @@ module.exports = async options => { const apiManager = new ApiManager() const init = Components.init({ apiManager, print, constructorOptions: options }) - const { api } = apiManager.update({ init }, ERR_NOT_INITIALIZED) + const { api } = apiManager.update({ init }, () => { throw new NotInitializedError() }) if (!options.init) { return api From d8e82b70e0b75a41a39a07bea166cceeae0570fc Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 9 Dec 2019 15:05:41 +0000 Subject: [PATCH 04/13] refactor: no separate boot procedure --- package.json | 12 +- src/cli/commands/add.js | 4 +- src/cli/commands/init.js | 2 +- src/cli/daemon.js | 4 +- src/core/api-manager.js | 10 +- src/core/boot.js | 90 ----- src/core/components-ipfsx/index.js | 15 - src/core/components-ipfsx/init.js | 302 --------------- src/core/components-ipfsx/start.js | 131 ------- src/core/components-ipfsx/stop.js | 107 ------ .../add/index.js | 3 +- .../add/utils.js | 0 src/core/components/index.js | 37 +- src/core/components/init-assets.js | 20 - src/core/components/init.js | 349 ++++++++++++------ src/core/components/pre-start.js | 75 ---- src/core/components/start.js | 151 ++++++-- src/core/components/stop.js | 139 +++++-- src/core/config.js | 101 ----- src/core/index.js | 211 +++-------- src/core/ipfsx.js | 45 --- src/core/runtime/init-assets-nodejs.js | 2 +- src/core/runtime/repo-browser.js | 5 +- src/core/runtime/repo-nodejs.js | 8 +- src/http/api/resources/files-regular.js | 2 +- 
test/core/interface.spec.js | 17 - 26 files changed, 556 insertions(+), 1286 deletions(-) delete mode 100644 src/core/boot.js delete mode 100644 src/core/components-ipfsx/index.js delete mode 100644 src/core/components-ipfsx/init.js delete mode 100644 src/core/components-ipfsx/start.js delete mode 100644 src/core/components-ipfsx/stop.js rename src/core/{components-ipfsx => components}/add/index.js (94%) rename src/core/{components-ipfsx => components}/add/utils.js (100%) delete mode 100644 src/core/components/init-assets.js delete mode 100644 src/core/components/pre-start.js delete mode 100644 src/core/config.js delete mode 100644 src/core/ipfsx.js diff --git a/package.json b/package.json index 6d9a098b16..48671af4f2 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,6 @@ ], "main": "src/core/index.js", "browser": { - "./src/core/components/init-assets.js": false, "./src/core/runtime/init-assets-nodejs.js": "./src/core/runtime/init-assets-browser.js", "./src/core/runtime/add-from-fs-nodejs.js": "./src/core/runtime/add-from-fs-browser.js", "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js", @@ -26,7 +25,8 @@ "./src/core/runtime/repo-nodejs.js": "./src/core/runtime/repo-browser.js", "./src/core/runtime/ipld-nodejs.js": "./src/core/runtime/ipld-browser.js", "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", - "stream": "readable-stream" + "stream": "readable-stream", + "ipfs-utils/src/files/glob-source": false }, "browser-all-ipld-formats": { "./src/core/runtime/ipld-browser.js": "./src/core/runtime/ipld-browser-all.js" @@ -101,7 +101,7 @@ "ipfs-bitswap": "^0.26.0", "ipfs-block": "~0.8.1", "ipfs-block-service": "~0.16.0", - "ipfs-http-client": "^40.0.1", + "ipfs-http-client": "github:ipfs/js-ipfs-http-client#refactor/async-iterables2", "ipfs-http-response": "~0.4.0", "ipfs-mfs": "^0.13.2", "ipfs-multipart": "^0.2.0", @@ -109,7 +109,7 @@ "ipfs-unixfs": "~0.1.16", "ipfs-unixfs-exporter": "^0.38.0", 
"ipfs-unixfs-importer": "^0.40.0", - "ipfs-utils": "~0.4.0", + "ipfs-utils": "^0.5.0", "ipld": "~0.25.0", "ipld-bitcoin": "~0.3.0", "ipld-dag-cbor": "~0.15.0", @@ -206,9 +206,9 @@ "execa": "^3.0.0", "form-data": "^3.0.0", "hat": "0.0.3", - "interface-ipfs-core": "^0.124.1", + "interface-ipfs-core": "github:ipfs/interface-js-ipfs-core#refactor/async-iterables", "ipfs-interop": "^0.1.1", - "ipfsd-ctl": "^0.47.2", + "ipfsd-ctl": "github:ipfs/js-ipfsd-ctl#fix/do-not-call-shutdown-twice", "libp2p-websocket-star": "~0.10.2", "lodash": "^4.17.15", "ncp": "^2.0.0", diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js index 3bfe6f9866..86ab463361 100644 --- a/src/cli/commands/add.js +++ b/src/cli/commands/add.js @@ -159,7 +159,7 @@ module.exports = { let finalHash try { - for await (const file of ipfs._addAsyncIterator(source, options)) { + for await (const file of ipfs.add(source, options)) { if (argv.silent) { continue } @@ -184,7 +184,7 @@ module.exports = { bar.terminate() } - // Tweak the error message and add more relevant infor for the CLI + // Tweak the error message and add more relevant info for the CLI if (err.code === 'ERR_DIR_NON_RECURSIVE') { err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories` } diff --git a/src/cli/commands/init.js b/src/cli/commands/init.js index a8c4f01b5f..899d2883ac 100644 --- a/src/cli/commands/init.js +++ b/src/cli/commands/init.js @@ -65,7 +65,7 @@ module.exports = { const IPFS = require('../../core') const Repo = require('ipfs-repo') - const node = new IPFS({ + const node = await IPFS.create({ repo: new Repo(path), init: false, start: false, diff --git a/src/cli/daemon.js b/src/cli/daemon.js index c2dc556a03..9c320b8098 100644 --- a/src/cli/daemon.js +++ b/src/cli/daemon.js @@ -55,9 +55,7 @@ class Daemon { // start the daemon const ipfsOpts = Object.assign({}, { init: true, start: true, libp2p }, this._options) - const ipfs = await IPFS.create(ipfsOpts) - - this._ipfs = ipfs + const ipfs = 
this._ipfs = await IPFS.create(ipfsOpts) // start HTTP servers (if API or Gateway is enabled in options) const httpApi = new HttpApi(ipfs, ipfsOpts) diff --git a/src/core/api-manager.js b/src/core/api-manager.js index 8acc3c9f44..1000a28b22 100644 --- a/src/core/api-manager.js +++ b/src/core/api-manager.js @@ -3,11 +3,11 @@ module.exports = class ApiManager { this._api = {} this._onUndef = () => undefined this.api = new Proxy({}, { - get (target, prop) { - return target[prop] === undefined - ? this._onUndef(prop) - : target[prop] - } + get: (_, prop) => { + if (prop === 'then') return undefined // Not a promise! + return this._api[prop] === undefined ? this._onUndef(prop) : this._api[prop] + }, + has: (_, prop) => prop in this._api }) } diff --git a/src/core/boot.js b/src/core/boot.js deleted file mode 100644 index aa47be16af..0000000000 --- a/src/core/boot.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' - -const RepoErrors = require('ipfs-repo').errors - -// Boot an IPFS node depending on the options set -module.exports = async (self) => { - self.log('booting') - const options = self._options - const doInit = options.init - const doStart = options.start - - // Checks if a repo exists, and if so opens it - // Will return callback with a bool indicating the existence - // of the repo - async function repoOpened () { - // nothing to do - if (!self._repo.closed) { - return true - } - - try { - await self._repo.open() - } catch (err) { - if (isRepoUninitializedError(err)) { - return false - } - - if (err) { - throw err - } - } - - return true - } - - // Do the actual boot sequence - try { - // Init with existing initialized, opened, repo - if (await repoOpened()) { - try { - await self.init({ repo: self._repo }) - } catch (err) { - throw Object.assign(err, { emitted: true }) - } - } else if (doInit) { - const defaultInitOptions = { - bits: 2048, - pass: self._options.pass - } - - const initOptions = Object.assign(defaultInitOptions, typeof options.init === 'object' ? 
options.init : {}) - - await self.init(initOptions) - } - - if (doStart) { - await self.start() - } - - self.log('booted') - self.emit('ready') - } catch (err) { - if (!err.emitted) { - self.emit('error', err) - } - } -} - -function isRepoUninitializedError (err) { - // If the error is that no repo exists, - // which happens when the version file is not found - // we just want to signal that no repo exist, not - // fail the whole process. - - // Use standardized errors as much as possible - if (err.code === RepoErrors.ERR_REPO_NOT_INITIALIZED) { - return true - } - - // TODO: As error codes continue to be standardized, this logic can be phase out; - // it is here to maintain compatibility - if (err.message.match(/not found/) || // indexeddb - err.message.match(/ENOENT/) || // fs - err.message.match(/No value/) // memory - ) { - return true - } - - return false -} diff --git a/src/core/components-ipfsx/index.js b/src/core/components-ipfsx/index.js deleted file mode 100644 index f9a2ee0109..0000000000 --- a/src/core/components-ipfsx/index.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict' - -module.exports = { - add: require('./add'), - init: require('./init'), - start: require('./start'), - stop: require('./stop'), - legacy: { - config: require('../components/config'), - dag: require('../components/dag'), - libp2p: require('../components/libp2p'), - object: require('../components/object'), - pin: require('../components/pin') - } -} diff --git a/src/core/components-ipfsx/init.js b/src/core/components-ipfsx/init.js deleted file mode 100644 index 15b64e7b93..0000000000 --- a/src/core/components-ipfsx/init.js +++ /dev/null @@ -1,302 +0,0 @@ -'use strict' - -const log = require('debug')('ipfs:components:init') -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') -const mergeOptions = require('merge-options') -const promisify = require('promisify-es6') -const getDefaultConfig = require('../runtime/config-nodejs.js') -const createRepo = 
require('../runtime/repo-nodejs') -const Keychain = require('libp2p-keychain') -const NoKeychain = require('../components/no-keychain') -const GCLock = require('../components/pin/gc-lock') -const { DAGNode } = require('ipld-dag-pb') -const UnixFs = require('ipfs-unixfs') -const multicodec = require('multicodec') -const multiaddr = require('multiaddr') -const { - AlreadyInitializingError, - AlreadyInitializedError, - NotStartedError -} = require('../../errors') -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') -const getDefaultIpldOptions = require('../runtime/ipld-nodejs') -const createPreloader = require('./preload') -const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors -const IPNS = require('../ipns') -const OfflineDatastore = require('../ipns/routing/offline-datastore') -const initAssets = require('../runtime/init-assets-nodejs') -const Components = require('.') -const PinManager = require('../components/pin/pin-manager') - -module.exports = ({ - apiManager, - print, - constructorOptions -}) => async function init (options) { - const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) - - try { - options = mergeOptions({}, options, constructorOptions.init) - - if (constructorOptions.pass) { - options.pass = constructorOptions.pass - } - - if (constructorOptions.config) { - options.config = constructorOptions.config - } - - const repo = typeof options.repo === 'string' || options.repo == null - ? createRepo(options.repo) - : options.repo - - let isInitialized = true - - if (repo.closed) { - try { - await repo.open() - } catch (err) { - if (err.code === ERR_REPO_NOT_INITIALIZED) { - isInitialized = false - } else { - throw err - } - } - } - - const { peerId, config, keychain } = isInitialized - ? 
await initExistingRepo(repo, options) - : await initNewRepo(repo, options) - - log('peer created') - const peerInfo = new PeerInfo(peerId) - - if (config.Addresses && config.Addresses.Swarm) { - config.Addresses.Swarm.forEach(addr => { - let ma = multiaddr(addr) - - if (ma.getPeerId()) { - ma = ma.encapsulate(`/p2p/${peerInfo.id.toB58String()}`) - } - - peerInfo.multiaddrs.add(ma) - }) - } - - const blockService = new BlockService(repo) - const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) - - const preload = createPreloader(constructorOptions.preload) - await preload.start() - - const gcLock = new GCLock(constructorOptions.repoOwner, { - // Make sure GCLock is specific to repo, for tests where there are - // multiple instances of IPFS - morticeId: repo.path - }) - - const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) - const object = Components.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) - - const pinManager = new PinManager(repo, dag) - await pinManager.load() - - const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) - const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) - - if (!isInitialized && !options.emptyRepo) { - // add empty unixfs dir object (go-ipfs assumes this exists) - const emptyDirCid = await addEmptyDir({ dag }) - - log('adding default assets') - await initAssets({ add, print }) - - log('initializing IPNS keyspace') - // Setup the offline routing for IPNS. - // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. 
- const offlineDatastore = new OfflineDatastore(repo) - const ipns = new IPNS(offlineDatastore, repo.datastore, peerInfo, keychain, { pass: options.pass }) - await ipns.initializeKeyspace(peerId.privKey.bytes, emptyDirCid.toString()) - } - - const api = createApi({ - add, - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions: options, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo - }) - - apiManager.update(api, () => { throw new NotStartedError() }) - } catch (err) { - cancel() - throw err - } - - return apiManager.api -} - -async function initNewRepo (repo, { privateKey, emptyRepo, bits, profiles, config, pass, print }) { - emptyRepo = emptyRepo || false - bits = bits == null ? 2048 : Number(bits) - - config = mergeOptions(getDefaultConfig(), config) - config = applyProfiles(profiles, config) - - // Verify repo does not exist yet - const exists = await repo.exists() - log('repo exists?', exists) - - if (exists === true) { - throw new Error('repo already exists') - } - - const peerId = await createPeerId({ privateKey, bits, print }) - let keychain = new NoKeychain() - - log('identity generated') - - config.Identity = { - PeerID: peerId.toB58String(), - PrivKey: peerId.privKey.bytes.toString('base64') - } - - privateKey = peerId.privKey - - config.Keychain = Keychain.generateOptions() - - log('peer identity: %s', config.Identity.PeerID) - - await repo.init(config) - await repo.open() - - log('repo opened') - - if (pass) { - log('creating keychain') - const keychainOptions = { passPhrase: pass, ...config.Keychain } - keychain = new Keychain(repo.keys, keychainOptions) - await keychain.importPeer('self', { privKey: privateKey }) - } - - return { peerId, keychain, config } -} - -async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { - let config = await repo.config.get() - - if (newConfig || profiles) { - if (newConfig) { - config = mergeOptions(config, newConfig) - } - if (profiles) { - config = 
applyProfiles(profiles, config) - } - await repo.config.set(config) - } - - let keychain = new NoKeychain() - - if (pass) { - const keychainOptions = { passPhrase: pass, ...config.Keychain } - keychain = new Keychain(repo.keys, keychainOptions) - log('keychain constructed') - } - - const peerId = await promisify(PeerId.createFromPrivKey)(config.Identity.PrivKey) - - // Import the private key as 'self', if needed. - if (pass) { - try { - await keychain.findKeyByName('self') - } catch (err) { - log('Creating "self" key') - await keychain.importPeer('self', peerId) - } - } - - return { peerId, keychain, config } -} - -function createPeerId ({ privateKey, bits, print }) { - if (privateKey) { - log('using user-supplied private-key') - return typeof privateKey === 'object' - ? privateKey - : promisify(PeerId.createFromPrivKey)(Buffer.from(privateKey, 'base64')) - } else { - // Generate peer identity keypair + transform to desired format + add to config. - print('generating %s-bit RSA keypair...', bits) - return promisify(PeerId.create)({ bits }) - } -} - -async function addEmptyDir ({ dag }) { - const node = new DAGNode(new UnixFs('directory').marshal()) - return dag.put(node, { - version: 0, - format: multicodec.DAG_PB, - hashAlg: multicodec.SHA2_256 - }) -} - -// Apply profiles (e.g. 
['server', 'lowpower']) to config -function applyProfiles (profiles, config) { - return (profiles || []).reduce((name, config) => { - const profile = Components.legacy.config.profiles[name] - if (!profile) { - throw new Error(`No profile with name '${name}'`) - } - log('applying profile %s', name) - return profile.transform(config) - }) -} - -function createApi ({ - add, - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo -}) { - const start = Components.start({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo - }) - - const api = { - add, - init: () => { throw new AlreadyInitializedError() }, - start - } - - return api -} diff --git a/src/core/components-ipfsx/start.js b/src/core/components-ipfsx/start.js deleted file mode 100644 index 53a3714b64..0000000000 --- a/src/core/components-ipfsx/start.js +++ /dev/null @@ -1,131 +0,0 @@ -'use strict' - -const Bitswap = require('ipfs-bitswap') -const PeerBook = require('peer-book') -const IPNS = require('../ipns') -const routingConfig = require('../ipns/routing/config') -const defer = require('p-defer') -const { ERR_ALREADY_INITIALIZED } = require('../../errors') - -const Components = require('.') - -module.exports = ({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo -}) => async function start () { - const startPromise = defer() - const { cancel } = apiManager.update({ start: () => startPromise.promise }) - - try { - // The repo may be closed if previously stopped - if (repo.closed) { - await repo.open() - } - - const config = await repo.config.get() - - const peerBook = new PeerBook() - const libp2p = Components.legacy.libp2p({ - _options: constructorOptions, - _repo: repo, - _peerInfo: peerInfo, - _peerInfoBook: peerBook - 
}, config) - - await libp2p.start() - - const ipnsRouting = routingConfig({ - _options: constructorOptions, - libp2p, - _repo: repo, - _peerInfo: peerInfo - }) - const ipns = new IPNS(ipnsRouting, repo.datastore, peerInfo, keychain, { pass: initOptions.pass }) - const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) - - await bitswap.start() - - blockService.setExchange(bitswap) - - await preload.start() - await ipns.republisher.start() - // TODO: start mfs preload here - - const api = createApi({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo - }) - - apiManager.update(api, () => undefined) - } catch (err) { - cancel() - startPromise.reject(err) - throw err - } - - startPromise.resolve(apiManager.api) - return apiManager.api -} - -function createApi ({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo -}) { - const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) - const object = Components.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) - const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) - const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) - - const stop = Components.stop({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - preload, - print, - repo - }) - - const api = { - add, - init: ERR_ALREADY_INITIALIZED, - start: () => apiManager.api, - stop - } - - return api -} diff --git a/src/core/components-ipfsx/stop.js b/src/core/components-ipfsx/stop.js deleted file mode 100644 index 8d5ae94624..0000000000 --- a/src/core/components-ipfsx/stop.js +++ /dev/null @@ -1,107 +0,0 @@ -'use strict' - -const defer = require('p-defer') -const Components = 
require('.') -const { NotStartedError, AlreadyInitializedError } = require('../../errors') - -module.exports = ({ - apiManager, - constructorOptions, - bitswap, - blockService, - gcLock, - initOptions, - ipld, - ipns, - keychain, - libp2p, - peerInfo, - pinManager, - preload, - print, - repo -}) => async function stop () { - const stopPromise = defer() - const { cancel } = apiManager.update({ stop: () => stopPromise.promise }) - - try { - blockService.unsetExchange() - bitswap.stop() - preload.stop() - - await Promise.all([ - ipns.republisher.stop(), - // mfsPreload.stop(), - libp2p.stop(), - repo.close() - ]) - - const api = createApi({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo - }) - - apiManager.update(api, () => { throw new NotStartedError() }) - } catch (err) { - cancel() - stopPromise.reject(err) - throw err - } - - stopPromise.resolve(apiManager.api) - return apiManager.api -} - -function createApi ({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo -}) { - const dag = Components.legacy.dag({ _ipld: ipld, _preload: preload }) - const object = Components.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) - const pin = Components.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) - const add = Components.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) - - const start = Components.start({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerInfo, - pinManager, - preload, - print, - repo - }) - - const api = { - add, - init: () => { throw new AlreadyInitializedError() }, - start, - stop: () => apiManager.api - } - - return api -} diff --git a/src/core/components-ipfsx/add/index.js b/src/core/components/add/index.js similarity index 
94% rename from src/core/components-ipfsx/add/index.js rename to src/core/components/add/index.js index bcd53fd387..d7e39ce3d4 100644 --- a/src/core/components-ipfsx/add/index.js +++ b/src/core/components/add/index.js @@ -6,11 +6,12 @@ const { parseChunkerString } = require('./utils') const pipe = require('it-pipe') module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { + const isShardingEnabled = constructorOptions.EXPERIMENTAL && constructorOptions.EXPERIMENTAL.sharding return async function * add (source, options) { options = options || {} const opts = { - shardSplitThreshold: constructorOptions.EXPERIMENTAL.sharding ? 1000 : Infinity, + shardSplitThreshold: isShardingEnabled ? 1000 : Infinity, ...options, strategy: 'balanced', ...parseChunkerString(options.chunker) diff --git a/src/core/components-ipfsx/add/utils.js b/src/core/components/add/utils.js similarity index 100% rename from src/core/components-ipfsx/add/utils.js rename to src/core/components/add/utils.js diff --git a/src/core/components/index.js b/src/core/components/index.js index ac893efbdd..eb9b0fb384 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -1,31 +1,14 @@ 'use strict' -exports.preStart = require('./pre-start') +exports.add = require('./add') +exports.init = require('./init') exports.start = require('./start') exports.stop = require('./stop') -exports.isOnline = require('./is-online') -exports.version = require('./version') -exports.id = require('./id') -exports.repo = require('./repo') -exports.init = require('./init') -exports.bootstrap = require('./bootstrap') -exports.config = require('./config') -exports.block = require('./block') -exports.object = require('./object') -exports.dag = require('./dag') -exports.libp2p = require('./libp2p') -exports.swarm = require('./swarm') -exports.ping = require('./ping') -exports.pingPullStream = require('./ping-pull-stream') -exports.pingReadableStream = require('./ping-readable-stream') 
-exports.pin = require('./pin') -exports.filesRegular = require('./files-regular') -exports.filesMFS = require('./files-mfs') -exports.bitswap = require('./bitswap') -exports.pubsub = require('./pubsub') -exports.dht = require('./dht') -exports.dns = require('./dns') -exports.key = require('./key') -exports.stats = require('./stats') -exports.resolve = require('./resolve') -exports.name = require('./name') + +exports.legacy = { // TODO: these will be removed as the new API is completed + config: require('./config'), + dag: require('./dag'), + libp2p: require('./libp2p'), + object: require('./object'), + pin: require('./pin') +} diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js deleted file mode 100644 index 6f0e4799d3..0000000000 --- a/src/core/components/init-assets.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const path = require('path') -const CID = require('cids') - -// Add the default assets to the repo. -module.exports = async function addDefaultAssets (self, log) { - const initDocsPath = path.join(__dirname, '../../init-files/init-docs') - - const results = await self.addFromFs(initDocsPath, { - recursive: true, - preload: false - }) - - const dir = results.filter(file => file.path === 'init-docs').pop() - const cid = new CID(dir.hash) - - log('to get started, enter:\n') - log(`\tjsipfs cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) -} diff --git a/src/core/components/init.js b/src/core/components/init.js index 5b2a1ec2df..7d86d254c9 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -1,164 +1,307 @@ 'use strict' -const peerId = require('peer-id') +const log = require('debug')('ipfs:components:init') +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') const mergeOptions = require('merge-options') -const callbackify = require('callbackify') const promisify = require('promisify-es6') -const defaultConfig = require('../runtime/config-nodejs.js') +const getDefaultConfig 
= require('../runtime/config-nodejs.js') +const createRepo = require('../runtime/repo-nodejs') const Keychain = require('libp2p-keychain') -const { - DAGNode -} = require('ipld-dag-pb') +const NoKeychain = require('./no-keychain') +const GCLock = require('./pin/gc-lock') +const { DAGNode } = require('ipld-dag-pb') const UnixFs = require('ipfs-unixfs') const multicodec = require('multicodec') - +const multiaddr = require('multiaddr') +const { + AlreadyInitializingError, + AlreadyInitializedError, + NotStartedError +} = require('../errors') +const BlockService = require('ipfs-block-service') +const Ipld = require('ipld') +const getDefaultIpldOptions = require('../runtime/ipld-nodejs') +const createPreloader = require('../preload') +const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors const IPNS = require('../ipns') const OfflineDatastore = require('../ipns/routing/offline-datastore') +const initAssets = require('../runtime/init-assets-nodejs') +const PinManager = require('./pin/pin-manager') +const Commands = require('./') + +module.exports = ({ + apiManager, + print, + constructorOptions +}) => async function init (options) { + const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) + + try { + options = options || {} + + if (typeof constructorOptions.init === 'object') { + options = mergeOptions(options, constructorOptions.init) + } -const addDefaultAssets = require('./init-assets') -const { profiles } = require('./config') + if (constructorOptions.pass) { + options.pass = constructorOptions.pass + } -function createPeerId (self, opts) { - if (opts.privateKey) { - self.log('using user-supplied private-key') - if (typeof opts.privateKey === 'object') { - return opts.privateKey - } else { - return promisify(peerId.createFromPrivKey)(Buffer.from(opts.privateKey, 'base64')) + if (constructorOptions.config) { + options.config = constructorOptions.config } - } else { - // Generate peer identity keypair + transform to 
desired format + add to config. - opts.log(`generating ${opts.bits}-bit RSA keypair...`, false) - self.log('generating peer id: %s bits', opts.bits) - return promisify(peerId.create)({ bits: opts.bits }) - } -} + const repo = typeof options.repo === 'string' || options.repo == null + ? createRepo({ path: options.repo, autoMigrate: options.repoAutoMigrate }) + : options.repo + + let isInitialized = true + + if (repo.closed) { + try { + await repo.open() + } catch (err) { + if (err.code === ERR_REPO_NOT_INITIALIZED) { + isInitialized = false + } else { + throw err + } + } + } -async function createRepo (self, opts) { - if (self.state.state() !== 'uninitialized') { - throw new Error('Not able to init from state: ' + self.state.state()) - } + const { peerId, config, keychain } = isInitialized + ? await initExistingRepo(repo, options) + : await initNewRepo(repo, options) + + log('peer created') + const peerInfo = new PeerInfo(peerId) + + if (config.Addresses && config.Addresses.Swarm) { + config.Addresses.Swarm.forEach(addr => { + let ma = multiaddr(addr) - self.state.init() - self.log('init') + if (ma.getPeerId()) { + ma = ma.encapsulate(`/p2p/${peerInfo.id.toB58String()}`) + } - // An initialized, open repo was passed, use this one! 
- if (opts.repo) { - self._repo = opts.repo + peerInfo.multiaddrs.add(ma) + }) + } + + const blockService = new BlockService(repo) + const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) + + const preload = createPreloader(constructorOptions.preload) + await preload.start() + + const gcLock = new GCLock(constructorOptions.repoOwner, { + // Make sure GCLock is specific to repo, for tests where there are + // multiple instances of IPFS + morticeId: repo.path + }) + + const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = Commands.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + + const pinManager = new PinManager(repo, dag) + await pinManager.load() + + const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) - return + if (!isInitialized && !options.emptyRepo) { + // add empty unixfs dir object (go-ipfs assumes this exists) + const emptyDirCid = await addEmptyDir({ dag }) + + log('adding default assets') + await initAssets({ add, print }) + + log('initializing IPNS keyspace') + // Setup the offline routing for IPNS. + // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. 
+ const offlineDatastore = new OfflineDatastore(repo) + const ipns = new IPNS(offlineDatastore, repo.datastore, peerInfo, keychain, { pass: options.pass }) + await ipns.initializeKeyspace(peerId.privKey.bytes, emptyDirCid.toString()) + } + + const api = createApi({ + add, + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions: options, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + throw err } - opts.emptyRepo = opts.emptyRepo || false - opts.bits = Number(opts.bits) || 2048 - opts.log = opts.log || function () {} + return apiManager.api +} - const config = mergeOptions(defaultConfig(), self._options.config) +async function initNewRepo (repo, { privateKey, emptyRepo, bits, profiles, config, pass, print }) { + emptyRepo = emptyRepo || false + bits = bits == null ? 2048 : Number(bits) - applyProfile(self, config, opts) + config = mergeOptions(getDefaultConfig(), config) + config = applyProfiles(profiles, config) // Verify repo does not exist yet - const exists = await self._repo.exists() - self.log('repo exists?', exists) + const exists = await repo.exists() + log('repo exists?', exists) + if (exists === true) { - throw Error('repo already exists') + throw new Error('repo already exists') } - const peerId = await createPeerId(self, opts) + const peerId = await createPeerId({ privateKey, bits, print }) + let keychain = new NoKeychain() - self.log('identity generated') + log('identity generated') config.Identity = { PeerID: peerId.toB58String(), PrivKey: peerId.privKey.bytes.toString('base64') } - const privateKey = peerId.privKey - if (opts.pass) { - config.Keychain = Keychain.generateOptions() + privateKey = peerId.privKey + + config.Keychain = Keychain.generateOptions() + + log('peer identity: %s', config.Identity.PeerID) + + await repo.init(config) + await repo.open() + + log('repo opened') + + if (pass) { + 
log('creating keychain') + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + await keychain.importPeer('self', { privKey: privateKey }) } - opts.log('done') - opts.log('peer identity: ' + config.Identity.PeerID) + return { peerId, keychain, config } +} - await self._repo.init(config) - await self._repo.open() +async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { + let config = await repo.config.get() - self.log('repo opened') + if (newConfig || profiles) { + if (newConfig) { + config = mergeOptions(config, newConfig) + } + if (profiles) { + config = applyProfiles(profiles, config) + } + await repo.config.set(config) + } - if (opts.pass) { - self.log('creating keychain') - const keychainOptions = Object.assign({ passPhrase: opts.pass }, config.Keychain) - self._keychain = new Keychain(self._repo.keys, keychainOptions) + let keychain = new NoKeychain() - await self._keychain.importPeer('self', { privKey: privateKey }) + if (pass) { + const keychainOptions = { passPhrase: pass, ...config.Keychain } + keychain = new Keychain(repo.keys, keychainOptions) + log('keychain constructed') } - // Setup the offline routing for IPNS. - // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. - const offlineDatastore = new OfflineDatastore(self._repo) + const peerId = await promisify(PeerId.createFromPrivKey)(config.Identity.PrivKey) - self._ipns = new IPNS(offlineDatastore, self._repo.datastore, self._peerInfo, self._keychain, self._options) + // Import the private key as 'self', if needed. 
+ if (pass) { + try { + await keychain.findKeyByName('self') + } catch (err) { + log('Creating "self" key') + await keychain.importPeer('self', peerId) + } + } - // add empty unixfs dir object (go-ipfs assumes this exists) - return addRepoAssets(self, privateKey, opts) + return { peerId, keychain, config } } -async function addRepoAssets (self, privateKey, opts) { - if (opts.emptyRepo) { - return +function createPeerId ({ privateKey, bits, print }) { + if (privateKey) { + log('using user-supplied private-key') + return typeof privateKey === 'object' + ? privateKey + : promisify(PeerId.createFromPrivKey)(Buffer.from(privateKey, 'base64')) + } else { + // Generate peer identity keypair + transform to desired format + add to config. + print('generating %s-bit RSA keypair...', bits) + return promisify(PeerId.create)({ bits }) } +} - self.log('adding assets') - +async function addEmptyDir ({ dag }) { const node = new DAGNode(new UnixFs('directory').marshal()) - const cid = await self.dag.put(node, { + return dag.put(node, { version: 0, format: multicodec.DAG_PB, hashAlg: multicodec.SHA2_256, preload: false }) - - await self._ipns.initializeKeyspace(privateKey, cid.toBaseEncodedString()) - - self.log('Initialised keyspace') - - if (typeof addDefaultAssets === 'function') { - self.log('Adding default assets') - // addDefaultAssets is undefined on browsers. - // See package.json browser config - return addDefaultAssets(self, opts.log) - } } -// Apply profiles (eg "server,lowpower") to config -function applyProfile (self, config, opts) { - if (opts.profiles) { - for (const name of opts.profiles) { - const profile = profiles[name] - - if (!profile) { - throw new Error(`Could not find profile with name '${name}'`) - } - - self.log(`applying profile ${name}`) - profile.transform(config) +// Apply profiles (e.g. 
['server', 'lowpower']) to config +function applyProfiles (profiles, config) { + return (profiles || []).reduce((name, config) => { + const profile = require('./config').profiles[name] + if (!profile) { + throw new Error(`No profile with name '${name}'`) } - } + log('applying profile %s', name) + return profile.transform(config) + }) } -module.exports = function init (self) { - return callbackify.variadic(async (opts) => { - opts = opts || {} - - await createRepo(self, opts) - self.log('Created repo') +function createApi ({ + add, + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const start = Commands.start({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) - await self.preStart() - self.log('Done pre-start') + const api = { + add, + init: () => { throw new AlreadyInitializedError() }, + start + } - self.state.initialized() - self.emit('init') - }) + return api } diff --git a/src/core/components/pre-start.js b/src/core/components/pre-start.js deleted file mode 100644 index 639b94a61f..0000000000 --- a/src/core/components/pre-start.js +++ /dev/null @@ -1,75 +0,0 @@ -'use strict' - -const peerId = require('peer-id') -const PeerInfo = require('peer-info') -const multiaddr = require('multiaddr') -const Keychain = require('libp2p-keychain') -const mergeOptions = require('merge-options') -const NoKeychain = require('./no-keychain') -const callbackify = require('callbackify') -const promisify = require('promisify-es6') - -/* - * Load stuff from Repo into memory - */ -module.exports = function preStart (self) { - return callbackify(async () => { - self.log('pre-start') - - const pass = self._options.pass - let config = await self._repo.config.get() - - if (self._options.config) { - config = mergeOptions(config, self._options.config) - await 
self.config.replace(config) - } - - // Create keychain configuration, if needed. - if (!config.Keychain) { - config.Keychain = Keychain.generateOptions() - await self.config.set('Keychain', config.Keychain) - self.log('using default keychain options') - } - - // Construct the keychain - if (self._keychain) { - // most likely an init or upgrade has happened - } else if (pass) { - const keychainOptions = Object.assign({ passPhrase: pass }, config.Keychain) - self._keychain = new Keychain(self._repo.keys, keychainOptions) - self.log('keychain constructed') - } else { - self._keychain = new NoKeychain() - self.log('no keychain, use --pass') - } - - const privKey = config.Identity.PrivKey - const id = await promisify(peerId.createFromPrivKey)(privKey) - - // Import the private key as 'self', if needed. - if (pass) { - try { - await self._keychain.findKeyByName('self') - } catch (err) { - self.log('Creating "self" key') - await self._keychain.importPeer('self', id) - } - } - - self.log('peer created') - self._peerInfo = new PeerInfo(id) - if (config.Addresses && config.Addresses.Swarm) { - config.Addresses.Swarm.forEach((addr) => { - let ma = multiaddr(addr) - - if (ma.getPeerId()) { - ma = ma.encapsulate('/ipfs/' + self._peerInfo.id.toB58String()) - } - - self._peerInfo.multiaddrs.add(ma) - }) - } - - await self.pin.pinManager.load() - }) -} diff --git a/src/core/components/start.js b/src/core/components/start.js index b3ea02bfa3..8fcb41ac17 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -1,51 +1,140 @@ 'use strict' const Bitswap = require('ipfs-bitswap') -const callbackify = require('callbackify') - +const PeerBook = require('peer-book') const IPNS = require('../ipns') const routingConfig = require('../ipns/routing/config') -const createLibp2pBundle = require('./libp2p') - -module.exports = (self) => { - return callbackify(async () => { - if (self.state.state() !== 'stopped') { - throw new Error(`Not able to start from state: 
${self.state.state()}`) - } +const defer = require('p-defer') +const { AlreadyInitializedError } = require('../errors') +const Commands = require('./') - self.log('starting') - self.state.start() +module.exports = ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function start () { + const startPromise = defer() + const { cancel } = apiManager.update({ start: () => startPromise.promise }) + try { // The repo may be closed if previously stopped - if (self._repo.closed) { - await self._repo.open() + if (repo.closed) { + await repo.open() } - const config = await self._repo.config.get() - const libp2p = createLibp2pBundle(self, config) + const config = await repo.config.get() + + const peerBook = new PeerBook() + const libp2p = Commands.legacy.libp2p({ + _options: constructorOptions, + _repo: repo, + _peerInfo: peerInfo, + _peerInfoBook: peerBook, + _print: print + }, config) await libp2p.start() - self.libp2p = libp2p - const ipnsRouting = routingConfig(self) - self._ipns = new IPNS(ipnsRouting, self._repo.datastore, self._peerInfo, self._keychain, self._options) + const ipnsRouting = routingConfig({ + _options: constructorOptions, + libp2p, + _repo: repo, + _peerInfo: peerInfo + }) + const ipns = new IPNS(ipnsRouting, repo.datastore, peerInfo, keychain, { pass: initOptions.pass }) + const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) + + await bitswap.start() - self._bitswap = new Bitswap( - self.libp2p, - self._repo.blocks, { - statsEnabled: true - } - ) + blockService.setExchange(bitswap) - await self._bitswap.start() + await preload.start() + await ipns.republisher.start() + // TODO: start mfs preload here - self._blockService.setExchange(self._bitswap) + const api = createApi({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, 
+ preload, + print, + repo + }) - await self._preload.start() - await self._ipns.republisher.start() - await self._mfsPreload.start() + apiManager.update(api, () => undefined) + } catch (err) { + cancel() + startPromise.reject(err) + throw err + } - self.state.started() - self.emit('start') + startPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = Commands.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + const stop = Commands.stop({ + apiManager, + bitswap, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + preload, + print, + repo }) + + const api = { + add, + init: () => { throw new AlreadyInitializedError() }, + start: () => apiManager.api, + stop + } + + return api } diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 1ee7bb9518..6785cfd889 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -1,40 +1,107 @@ 'use strict' -const callbackify = require('callbackify') - -module.exports = (self) => { - return callbackify(async () => { - self.log('stop') - - if (self.state.state() === 'stopped') { - throw new Error('Already stopped') - } - - if (self.state.state() !== 'running') { - throw new Error('Not able to stop from state: ' + self.state.state()) - } - - self.state.stop() - self._blockService.unsetExchange() - self._bitswap.stop() - self._preload.stop() - - const libp2p = self.libp2p - self.libp2p = null - - try { - await 
Promise.all([ - self._ipns.republisher.stop(), - self._mfsPreload.stop(), - libp2p.stop(), - self._repo.close() - ]) - - self.state.stopped() - self.emit('stop') - } catch (err) { - self.emit('error', err) - throw err - } +const defer = require('p-defer') +const { NotStartedError, AlreadyInitializedError } = require('../errors') +const Commands = require('./') + +module.exports = ({ + apiManager, + constructorOptions, + bitswap, + blockService, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + peerInfo, + pinManager, + preload, + print, + repo +}) => async function stop () { + const stopPromise = defer() + const { cancel } = apiManager.update({ stop: () => stopPromise.promise }) + + try { + blockService.unsetExchange() + bitswap.stop() + preload.stop() + + await Promise.all([ + ipns.republisher.stop(), + // mfsPreload.stop(), + libp2p.stop(), + repo.close() + ]) + + const api = createApi({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + stopPromise.reject(err) + throw err + } + + stopPromise.resolve(apiManager.api) + return apiManager.api +} + +function createApi ({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + print, + repo +}) { + const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) + const object = Commands.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) + const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) + + const start = Commands.start({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerInfo, + pinManager, + preload, + 
print, + repo }) + + const api = { + add, + init: () => { throw new AlreadyInitializedError() }, + start, + stop: () => apiManager.api + } + + return api } diff --git a/src/core/config.js b/src/core/config.js deleted file mode 100644 index 6f2353efb1..0000000000 --- a/src/core/config.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -const Multiaddr = require('multiaddr') -const mafmt = require('mafmt') -const { struct, superstruct } = require('superstruct') -const { isTest } = require('ipfs-utils/src/env') - -const { optional, union } = struct -const s = superstruct({ - types: { - multiaddr: v => { - if (v === null) { - return `multiaddr invalid, value must be a string, Buffer, or another Multiaddr got ${v}` - } - - try { - Multiaddr(v) - } catch (err) { - return `multiaddr invalid, ${err.message}` - } - - return true - }, - 'multiaddr-ipfs': v => mafmt.IPFS.matches(v) ? true : 'multiaddr IPFS invalid' - } -}) - -const configSchema = s({ - repo: optional(s('object|string')), - repoOwner: 'boolean?', - repoAutoMigrate: 'boolean?', - preload: s({ - enabled: 'boolean?', - addresses: optional(s(['multiaddr'])), - interval: 'number?' - }, { enabled: !isTest, interval: 30 * 1000 }), - init: optional(union(['boolean', s({ - bits: 'number?', - emptyRepo: 'boolean?', - privateKey: optional(s('object|string')), // object should be a custom type for PeerId using 'kind-of' - pass: 'string?', - profiles: 'array?' - })])), - start: 'boolean?', - offline: 'boolean?', - pass: 'string?', - silent: 'boolean?', - relay: 'object?', // relay validates in libp2p - EXPERIMENTAL: optional(s({ - pubsub: 'boolean?', - ipnsPubsub: 'boolean?', - sharding: 'boolean?', - dht: 'boolean?' 
- })), - connectionManager: 'object?', - config: optional(s({ - API: 'object?', - Addresses: optional(s({ - Delegates: optional(s(['multiaddr'])), - Swarm: optional(s(['multiaddr'])), - API: optional(union([s('multiaddr'), s(['multiaddr'])])), - Gateway: optional(union([s('multiaddr'), s(['multiaddr'])])) - })), - Discovery: optional(s({ - MDNS: optional(s({ - Enabled: 'boolean?', - Interval: 'number?' - })), - webRTCStar: optional(s({ - Enabled: 'boolean?' - })) - })), - Bootstrap: optional(s(['multiaddr-ipfs'])), - Pubsub: optional(s({ - Router: 'string?', - Enabled: 'boolean?' - })), - Swarm: optional(s({ - ConnMgr: optional(s({ - LowWater: 'number?', - HighWater: 'number?' - })) - })) - })), - ipld: 'object?', - libp2p: optional(union(['function', 'object'])) // libp2p validates this -}, { - repoOwner: true -}) - -const validate = (opts) => { - const [err, options] = configSchema.validate(opts) - - if (err) { - throw err - } - - return options -} - -module.exports = { validate } diff --git a/src/core/index.js b/src/core/index.js index 17a3ff99de..878e1c4957 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -1,183 +1,74 @@ 'use strict' -const BlockService = require('ipfs-block-service') -const Ipld = require('ipld') +const log = require('debug')('ipfs') +const mergeOptions = require('merge-options') +const { isTest } = require('ipfs-utils/src/env') +const globSource = require('ipfs-utils/src/files/glob-source') +const urlSource = require('ipfs-utils/src/files/url-source') +const { Buffer } = require('buffer') const PeerId = require('peer-id') const PeerInfo = require('peer-info') const crypto = require('libp2p-crypto') const isIPFS = require('is-ipfs') const multiaddr = require('multiaddr') const multihash = require('multihashes') -const PeerBook = require('peer-book') const multibase = require('multibase') const multicodec = require('multicodec') const multihashing = require('multihashing-async') const CID = require('cids') -const debug = 
require('debug') -const mergeOptions = require('merge-options') -const EventEmitter = require('events') - -const config = require('./config') -const boot = require('./boot') -const components = require('./components') -const GCLock = require('./components/pin/gc-lock') - -// replaced by repo-browser when running in the browser -const defaultRepo = require('./runtime/repo-nodejs') -const preload = require('./preload') -const mfsPreload = require('./mfs-preload') -const ipldOptions = require('./runtime/ipld-nodejs') -const { isTest } = require('ipfs-utils/src/env') -const ipfsx = require('./ipfsx') - -/** - * @typedef { import("./ipns/index") } IPNS - */ - -/** - * @class IPFS - * @extends {EventEmitter} - */ -class IPFS extends EventEmitter { - constructor (options) { - super() - - const defaults = { - init: true, - start: true, - EXPERIMENTAL: {}, - preload: { - enabled: !isTest, // preload by default, unless in test env - addresses: [ - '/dnsaddr/node0.preload.ipfs.io/https', - '/dnsaddr/node1.preload.ipfs.io/https' - ] - } - } - - options = config.validate(options || {}) - - this._options = mergeOptions(defaults, options) - - if (options.init === false) { - this._options.init = false - } - - if (!(options.start === false)) { - this._options.start = true - } - - if (typeof options.repo === 'string' || - options.repo === undefined) { - this._repo = defaultRepo(options) - } else { - this._repo = options.repo - } - - // IPFS utils - this.log = debug('ipfs') - this.log.err = debug('ipfs:err') - - // IPFS Core Internals - // this._repo - assigned above - this._peerInfoBook = new PeerBook() - this._peerInfo = undefined - this._bitswap = undefined - this._blockService = new BlockService(this._repo) - this._ipld = new Ipld(ipldOptions(this._blockService, this._options.ipld, this.log)) - this._preload = preload(this._options.preload) - this._mfsPreload = mfsPreload(this) - /** @type {IPNS} */ - this._ipns = undefined - // eslint-disable-next-line no-console - this._print = 
this._options.silent ? this.log : console.log - this._gcLock = new GCLock(this._options.repoOwner, { - // Make sure GCLock is specific to repo, for tests where there are - // multiple instances of IPFS - morticeId: this._repo.path - }) - - // IPFS Core exposed components - // - for booting up a node - this.init = components.init(this) - this.preStart = components.preStart(this) - this.start = components.start(this) - this.stop = components.stop(this) - this.shutdown = this.stop - this.isOnline = components.isOnline(this) - // - interface-ipfs-core defined API - Object.assign(this, components.filesRegular(this)) - this.version = components.version(this) - this.id = components.id(this) - this.repo = components.repo(this) - this.bootstrap = components.bootstrap(this) - this.config = components.config(this) - this.block = components.block(this) - this.object = components.object(this) - this.dag = components.dag(this) - this.files = components.filesMFS(this) - this.libp2p = null // assigned on start - this.swarm = components.swarm(this) - this.name = components.name(this) - this.bitswap = components.bitswap(this) - this.pin = components.pin(this) - this.ping = components.ping(this) - this.pingPullStream = components.pingPullStream(this) - this.pingReadableStream = components.pingReadableStream(this) - this.pubsub = components.pubsub(this) - this.dht = components.dht(this) - this.dns = components.dns(this) - this.key = components.key(this) - this.stats = components.stats(this) - this.resolve = components.resolve(this) - - if (this._options.EXPERIMENTAL.ipnsPubsub) { - this.log('EXPERIMENTAL IPNS pubsub is enabled') - } - if (this._options.EXPERIMENTAL.sharding) { - this.log('EXPERIMENTAL sharding is enabled') - } - - this.state = require('./state')(this) - - const onReady = () => { - this.removeListener('error', onError) - this._ready = true - } +const { NotInitializedError } = require('./errors') +const createInitApi = require('./components/init') +const ApiManager = 
require('./api-manager') + +const getDefaultOptions = () => ({ + init: true, + start: true, + EXPERIMENTAL: {}, + preload: { + enabled: !isTest, // preload by default, unless in test env + addresses: [ + '/dns4/node0.preload.ipfs.io/https', + '/dns4/node1.preload.ipfs.io/https' + ] + } +}) - const onError = err => { - this.removeListener('ready', onReady) - this._readyError = err - } +async function create (options) { + options = mergeOptions(getDefaultOptions(), options) - this.once('ready', onReady).once('error', onError) + // eslint-disable-next-line no-console + const print = options.silent ? log : console.log - boot(this) - } + const apiManager = new ApiManager() + const init = createInitApi({ apiManager, print, constructorOptions: options }) + const { api } = apiManager.update({ init }, () => { throw new NotInitializedError() }) - get ready () { - return new Promise((resolve, reject) => { - if (this._ready) return resolve(this) - if (this._readyError) return reject(this._readyError) - this.once('ready', () => resolve(this)) - this.once('error', reject) - }) + if (!options.init) { + return api } -} -module.exports = IPFS + await api.init() -// Note: We need to do this to force browserify to load the Buffer module -const BufferImpl = Buffer -Object.assign(module.exports, { crypto, isIPFS, Buffer: BufferImpl, CID, multiaddr, multibase, multihash, multihashing, multicodec, PeerId, PeerInfo }) + if (!options.start) { + return api + } -module.exports.createNode = (options) => { - return new IPFS(options) + return api.start() } -module.exports.create = (options) => { - if (options && options.EXPERIMENTAL && options.EXPERIMENTAL.ipfsx) { - return ipfsx(options) - } - return new IPFS(options).ready +module.exports = { + create, + crypto, + isIPFS, + Buffer, + CID, + multiaddr, + multibase, + multihash, + multihashing, + multicodec, + PeerId, + PeerInfo, + globSource, + urlSource } diff --git a/src/core/ipfsx.js b/src/core/ipfsx.js deleted file mode 100644 index 
585a0a135d..0000000000 --- a/src/core/ipfsx.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict' - -const log = require('debug')('ipfs') -const mergeOptions = require('merge-options') -const { isTest } = require('ipfs-utils/src/env') -const { NotInitializedError } = require('./errors') -const { validate } = require('./config') -const Components = require('./components-ipfsx') -const ApiManager = require('./api-manager') - -const getDefaultOptions = () => ({ - init: true, - start: true, - EXPERIMENTAL: {}, - preload: { - enabled: !isTest, // preload by default, unless in test env - addresses: [ - '/dns4/node0.preload.ipfs.io/https', - '/dns4/node1.preload.ipfs.io/https' - ] - } -}) - -module.exports = async options => { - options = mergeOptions(getDefaultOptions(), validate(options || {})) - - // eslint-disable-next-line no-console - const print = options.silent ? log : console.log - - const apiManager = new ApiManager() - const init = Components.init({ apiManager, print, constructorOptions: options }) - const { api } = apiManager.update({ init }, () => { throw new NotInitializedError() }) - - if (!options.init) { - return api - } - - await api.init() - - if (!options.start) { - return api - } - - return api.start() -} diff --git a/src/core/runtime/init-assets-nodejs.js b/src/core/runtime/init-assets-nodejs.js index d8c4665f42..81c0a34832 100644 --- a/src/core/runtime/init-assets-nodejs.js +++ b/src/core/runtime/init-assets-nodejs.js @@ -7,7 +7,7 @@ const all = require('async-iterator-all') // Add the default assets to the repo. 
module.exports = async function initAssets ({ add, print }) { const initDocsPath = path.join(__dirname, '..', '..', 'init-files', 'init-docs') - const results = await all(add(globSource(initDocsPath))) + const results = await all(add(globSource(initDocsPath, { recursive: true }), { preload: false })) const dir = results.filter(file => file.path === 'init-docs').pop() print('to get started, enter:\n') diff --git a/src/core/runtime/repo-browser.js b/src/core/runtime/repo-browser.js index 8bd0f330e2..de4c9f59bf 100644 --- a/src/core/runtime/repo-browser.js +++ b/src/core/runtime/repo-browser.js @@ -3,6 +3,7 @@ const IPFSRepo = require('ipfs-repo') module.exports = (options) => { - const repoPath = options.repo || 'ipfs' - return new IPFSRepo(repoPath, { autoMigrate: options.repoAutoMigrate }) + options = options || {} + const repoPath = options.path || 'ipfs' + return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/src/core/runtime/repo-nodejs.js b/src/core/runtime/repo-nodejs.js index 431d59b377..d8581b7e32 100644 --- a/src/core/runtime/repo-nodejs.js +++ b/src/core/runtime/repo-nodejs.js @@ -4,8 +4,8 @@ const os = require('os') const IPFSRepo = require('ipfs-repo') const path = require('path') -module.exports = (options) => { - const repoPath = options.repo || path.join(os.homedir(), '.jsipfs') - - return new IPFSRepo(repoPath, { autoMigrate: options.repoAutoMigrate }) +module.exports = options => { + options = options || {} + const repoPath = options.path || path.join(os.homedir(), '.jsipfs') + return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js index bc963ce3d1..9513a9a124 100644 --- a/src/http/api/resources/files-regular.js +++ b/src/http/api/resources/files-regular.js @@ -208,7 +208,7 @@ exports.add = { } }, function (source) { - return ipfs._addAsyncIterator(source, { + return ipfs.add(source, { cidVersion: 
request.query['cid-version'], rawLeaves: request.query['raw-leaves'], progress: request.query.progress ? progressHandler : null, diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index 0b882fdf01..031ff409ab 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -41,23 +41,6 @@ describe('interface-ipfs-core tests', function () { } }) - tests.filesRegular(defaultCommonFactory, { - skip: isNode ? null : [{ - name: 'addFromStream', - reason: 'Not designed to run in the browser' - }, { - name: 'addFromFs', - reason: 'Not designed to run in the browser' - }] - }) - - tests.filesRegular.ipfsx(CommonFactory.create({ - spawnOptions: { - initOptions: { bits: 512, profile: 'test' }, - EXPERIMENTAL: { ipfsx: true } - } - })) - tests.filesMFS(defaultCommonFactory) tests.key(CommonFactory.createAsync({ From cbdf9e65c4ff1aba0d22efd5965a411482a89c76 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 10 Dec 2019 11:07:05 +0000 Subject: [PATCH 05/13] fix: convert config API as it used by ipfsd-ctl --- package.json | 1 + src/core/components/add/index.js | 15 ++- src/core/components/config.js | 17 ++- src/core/components/index.js | 2 +- src/core/components/init.js | 1 + src/core/components/start.js | 1 + src/core/components/stop.js | 1 + test/cli/files.js | 2 +- test/core/config.spec.js | 223 ------------------------------- test/core/files.spec.js | 13 +- test/core/interface.spec.js | 2 +- 11 files changed, 30 insertions(+), 248 deletions(-) delete mode 100644 test/core/config.spec.js diff --git a/package.json b/package.json index 48671af4f2..d29278f6de 100644 --- a/package.json +++ b/package.json @@ -209,6 +209,7 @@ "interface-ipfs-core": "github:ipfs/interface-js-ipfs-core#refactor/async-iterables", "ipfs-interop": "^0.1.1", "ipfsd-ctl": "github:ipfs/js-ipfsd-ctl#fix/do-not-call-shutdown-twice", + "it-all": "^1.0.1", "libp2p-websocket-star": "~0.10.2", "lodash": "^4.17.15", "ncp": "^2.0.0", diff --git a/src/core/components/add/index.js 
b/src/core/components/add/index.js index d7e39ce3d4..4eed312d9f 100644 --- a/src/core/components/add/index.js +++ b/src/core/components/add/index.js @@ -59,21 +59,23 @@ module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { function transformFile (dag, opts) { return async function * (source) { for await (const { cid, path, unixfs } of source) { + const hash = cid.toString() + if (opts.onlyHash) { yield { - cid, - path: path || cid.toString(), + hash, + path: path || hash, size: unixfs.fileSize() } continue } - const node = await dag.get(cid, { ...opts, preload: false }) + const { value: node } = await dag.get(cid, { ...opts, preload: false }) yield { - cid, - path: path || cid.toString(), + hash, + path: path || hash, size: Buffer.isBuffer(node) ? node.length : node.size } } @@ -103,9 +105,8 @@ function pinFile (pin, opts) { for await (const file of source) { // Pin a file if it is the root dir of a recursive add or the single file // of a direct add. - const pin = 'pin' in opts ? opts.pin : true const isRootDir = !file.path.includes('/') - const shouldPin = pin && isRootDir && !opts.onlyHash + const shouldPin = (opts.pin == null ? 
true : opts.pin) && isRootDir && !opts.onlyHash if (shouldPin) { // Note: addAsyncIterator() has already taken a GC lock, so tell diff --git a/src/core/components/config.js b/src/core/components/config.js index 381a36ce61..c747387c73 100644 --- a/src/core/components/config.js +++ b/src/core/components/config.js @@ -1,17 +1,16 @@ 'use strict' -const callbackify = require('callbackify') const getDefaultConfig = require('../runtime/config-nodejs.js') const log = require('debug')('ipfs:core:config') -module.exports = function config (self) { +module.exports = ({ repo }) => { return { - get: callbackify.variadic(self._repo.config.get), - set: callbackify(self._repo.config.set), - replace: callbackify.variadic(self._repo.config.set), + get: repo.config.get, + set: repo.config.set, + replace: repo.config.set, profiles: { - apply: callbackify.variadic(applyProfile), - list: callbackify.variadic(listProfiles) + apply: applyProfile, + list: listProfiles } } @@ -26,12 +25,12 @@ module.exports = function config (self) { } try { - const oldCfg = await self.config.get() + const oldCfg = await repo.config.get() let newCfg = JSON.parse(JSON.stringify(oldCfg)) // clone newCfg = profile.transform(newCfg) if (!dryRun) { - await self.config.replace(newCfg) + await repo.config.set(newCfg) } // Scrub private key from output diff --git a/src/core/components/index.js b/src/core/components/index.js index eb9b0fb384..44d922712a 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -1,12 +1,12 @@ 'use strict' exports.add = require('./add') +exports.config = require('./config') exports.init = require('./init') exports.start = require('./start') exports.stop = require('./stop') exports.legacy = { // TODO: these will be removed as the new API is completed - config: require('./config'), dag: require('./dag'), libp2p: require('./libp2p'), object: require('./object'), diff --git a/src/core/components/init.js b/src/core/components/init.js index 7d86d254c9..089d6148dc 
100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -299,6 +299,7 @@ function createApi ({ const api = { add, + config: Commands.config({ repo }), init: () => { throw new AlreadyInitializedError() }, start } diff --git a/src/core/components/start.js b/src/core/components/start.js index 8fcb41ac17..f9f41c7458 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -131,6 +131,7 @@ function createApi ({ const api = { add, + config: Commands.config({ repo }), init: () => { throw new AlreadyInitializedError() }, start: () => apiManager.api, stop diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 6785cfd889..4e2a9bb036 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -98,6 +98,7 @@ function createApi ({ const api = { add, + config: Commands.config({ repo }), init: () => { throw new AlreadyInitializedError() }, start, stop: () => apiManager.api diff --git a/test/cli/files.js b/test/cli/files.js index 4858cc0cb0..ec7e0263a6 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -26,7 +26,7 @@ const HASH_ALGS = [ 'keccak-512' ] -describe('files', () => runOnAndOff((thing) => { +describe.only('files', () => runOnAndOff((thing) => { let ipfs const readme = fs.readFileSync(path.join(process.cwd(), '/src/init-files/init-docs/readme')) .toString('utf-8') diff --git a/test/core/config.spec.js b/test/core/config.spec.js deleted file mode 100644 index ee1fa4a00e..0000000000 --- a/test/core/config.spec.js +++ /dev/null @@ -1,223 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const { expect } = require('interface-ipfs-core/src/utils/mocha') -const config = require('../../src/core/config') - -describe('config', () => { - it('should allow empty config', () => { - const cfg = {} - expect(() => config.validate(cfg)).to.not.throw() - }) - - it('should allow undefined config', () => { - const cfg = undefined - expect(() => config.validate(cfg)).to.not.throw() - }) - - it('should 
validate valid repo', () => { - const cfgs = [ - { repo: { unknown: 'value' } }, - { repo: '/path/to-repo' }, - { repo: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid repo', () => { - const cfgs = [ - { repo: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid init', () => { - const cfgs = [ - { init: { bits: 138 } }, - { init: true }, - { init: false }, - { init: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid init', () => { - const cfgs = [ - { init: 138 }, - { init: { bits: 'not an int' } } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid start', () => { - const cfgs = [ - { start: true }, - { start: false }, - { start: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid start', () => { - const cfgs = [ - { start: 138 }, - { start: 'make it so number 1' }, - { start: null } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid pass', () => { - const cfgs = [ - { pass: 'correctbatteryhorsestaple' }, - { pass: '' }, - { pass: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid pass', () => { - const cfgs = [ - { pass: 138 }, - { pass: null } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid EXPERIMENTAL', () => { - const cfgs = [ - { EXPERIMENTAL: { dht: true, sharding: true } }, - { EXPERIMENTAL: { dht: false, sharding: false } }, - { EXPERIMENTAL: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid EXPERIMENTAL', () => { - const cfgs = [ - { 
EXPERIMENTAL: { dht: 138 } }, - { EXPERIMENTAL: { sharding: 138 } } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid config', () => { - const cfgs = [ - { config: { Addresses: { Swarm: ['/ip4/0.0.0.0/tcp/4002'] } } }, - { config: { Addresses: { Swarm: [] } } }, - { config: { Addresses: { Swarm: undefined } } }, - - { config: { Addresses: { API: '/ip4/127.0.0.1/tcp/5002' } } }, - { config: { Addresses: { API: ['/ip4/127.0.0.1/tcp/5002', '/ip4/127.0.0.1/tcp/5003'] } } }, - { config: { Addresses: { API: undefined } } }, - - { config: { Addresses: { Gateway: '/ip4/127.0.0.1/tcp/9090' } } }, - { config: { Addresses: { Gateway: ['/ip4/127.0.0.1/tcp/9090', '/ip4/127.0.0.1/tcp/9091'] } } }, - { config: { Addresses: { Gateway: undefined } } }, - - { config: { Addresses: { Delegates: ['/dns4/node0.preload.ipfs.io/tcp/443/https'] } } }, - { config: { Addresses: { Delegates: [] } } }, - { config: { Addresses: { Delegates: undefined } } }, - - { config: { Addresses: undefined } }, - - { config: { Discovery: { MDNS: { Enabled: true } } } }, - { config: { Discovery: { MDNS: { Enabled: false } } } }, - { config: { Discovery: { MDNS: { Interval: 138 } } } }, - { config: { Discovery: { MDNS: undefined } } }, - - { config: { Discovery: { webRTCStar: { Enabled: true } } } }, - { config: { Discovery: { webRTCStar: { Enabled: false } } } }, - { config: { Discovery: { webRTCStar: undefined } } }, - - { config: { Discovery: undefined } }, - - { config: { Bootstrap: ['/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'] } }, - { config: { Bootstrap: [] } }, - - { config: { Swarm: { ConnMgr: { LowWater: 200, HighWater: 500 } } } }, - { config: { Swarm: { ConnMgr: { LowWater: undefined, HighWater: undefined } } } }, - { config: { Swarm: { ConnMgr: undefined } } }, - { config: { Swarm: undefined } }, - - { config: { Pubsub: { Enabled: true, Router: 'gossipsub' } } }, - { config: { Pubsub: { Enabled: 
false } } }, - - { config: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid config', () => { - const cfgs = [ - { config: { Addresses: { Swarm: 138 } } }, - { config: { Addresses: { Swarm: null } } }, - - { config: { Addresses: { API: 138 } } }, - { config: { Addresses: { API: null } } }, - - { config: { Addresses: { Gateway: 138 } } }, - { config: { Addresses: { Gateway: null } } }, - - { config: { Discovery: { MDNS: { Enabled: 138 } } } }, - { config: { Discovery: { MDNS: { Interval: true } } } }, - - { config: { Discovery: { webRTCStar: { Enabled: 138 } } } }, - - { config: { Bootstrap: ['/ip4/0.0.0.0/tcp/4002'] } }, - { config: { Bootstrap: 138 } }, - - { config: { Swarm: { ConnMgr: { LowWater: 200, HighWater: {} } } } }, - { config: { Swarm: { ConnMgr: { LowWater: {}, HighWater: 500 } } } }, - { config: { Swarm: { ConnMgr: 138 } } }, - { config: { Swarm: 138 } }, - - { config: { Pubsub: { Enabled: 1 } } }, - - { config: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid libp2p', () => { - const cfgs = [ - { libp2p: { modules: {} } }, - { libp2p: () => {} }, - { libp2p: undefined } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.not.throw()) - }) - - it('should validate invalid libp2p', () => { - const cfgs = [ - { libp2p: 'error' }, - { libp2p: 138 } - ] - - cfgs.forEach(cfg => expect(() => config.validate(cfg)).to.throw()) - }) - - it('should validate valid profiles', () => { - expect( - () => config.validate({ init: { profiles: ['test'] } }) - ).to.not.throw() - }) - it('should validate invalid profiles', () => { - expect( - () => config.validate({ init: { profiles: 'test' } }) - ).to.throw() - }) -}) diff --git a/test/core/files.spec.js b/test/core/files.spec.js index ea8ca2380a..484c06ba43 100644 --- a/test/core/files.spec.js +++ b/test/core/files.spec.js @@ -6,9 +6,10 @@ const { expect } = 
require('interface-ipfs-core/src/utils/mocha') const hat = require('hat') const pull = require('pull-stream') const IPFSFactory = require('ipfsd-ctl') +const all = require('it-all') const IPFS = require('../../src/core') -describe('files', function () { +describe('files', function () { this.timeout(10 * 1000) let ipfsd, ipfs @@ -74,13 +75,13 @@ describe('files', function () { describe('add', () => { it('should not error when passed null options', async () => { - await ipfs.add(Buffer.from(hat()), null) + await all(ipfs.add(Buffer.from(hat()), null)) }) it('should add a file with a v1 CID', async () => { - const files = await ipfs.add(Buffer.from([0, 1, 2]), { + const files = await all(ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1 - }) + })) expect(files.length).to.equal(1) expect(files[0].hash).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') @@ -88,10 +89,10 @@ describe('files', function () { }) it('should add a file with a v1 CID and not raw leaves', async () => { - const files = await ipfs.add(Buffer.from([0, 1, 2]), { + const files = await all(ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1, rawLeaves: false - }) + })) expect(files.length).to.equal(1) expect(files[0].hash).to.equal('bafybeide2caf5we5a7izifzwzz5ds2gla67vsfgrzvbzpnyyirnfzgwf5e') diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index 031ff409ab..ff5cc32391 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -41,7 +41,7 @@ describe('interface-ipfs-core tests', function () { } }) - tests.filesMFS(defaultCommonFactory) + tests.files(defaultCommonFactory) tests.key(CommonFactory.createAsync({ spawnOptions: { From 21e62cb37b3abb4df46100ec937d62b6c7bcb489 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Thu, 12 Dec 2019 12:23:43 +0000 Subject: [PATCH 06/13] refactor: return CID instance from ipfs.add BREAKING CHANGE: `ipfs.add` results now contain a `cid` property (a [CID instance](https://github.com/multiformats/js-cid))
instead of a string `hash` property. refs https://github.com/ipfs/interface-js-ipfs-core/issues/394 --- src/cli/commands/add.js | 8 ++++---- src/core/components/add/index.js | 10 ++++------ src/http/api/resources/files-regular.js | 2 +- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js index 86ab463361..b482522e02 100644 --- a/src/cli/commands/add.js +++ b/src/cli/commands/add.js @@ -156,7 +156,7 @@ module.exports = { ? globSource(argv.file, { recursive: argv.recursive }) : process.stdin // Pipe directly to ipfs.add - let finalHash + let finalCid try { for await (const file of ipfs.add(source, options)) { @@ -165,11 +165,11 @@ module.exports = { } if (argv.quieter) { - finalHash = file.hash + finalCid = file.cid continue } - const cid = cidToString(file.hash, { base: argv.cidBase }) + const cid = cidToString(file.cid, { base: argv.cidBase }) let message = cid if (!argv.quiet) { @@ -197,7 +197,7 @@ module.exports = { } if (argv.quieter) { - log(cidToString(finalHash, { base: argv.cidBase })) + log(cidToString(finalCid, { base: argv.cidBase })) } })()) } diff --git a/src/core/components/add/index.js b/src/core/components/add/index.js index 4eed312d9f..ee655817e1 100644 --- a/src/core/components/add/index.js +++ b/src/core/components/add/index.js @@ -59,12 +59,10 @@ module.exports = ({ ipld, dag, gcLock, preload, pin, constructorOptions }) => { function transformFile (dag, opts) { return async function * (source) { for await (const { cid, path, unixfs } of source) { - const hash = cid.toString() - if (opts.onlyHash) { yield { - hash, - path: path || hash, + cid, + path: path || cid.toString(), size: unixfs.fileSize() } @@ -74,8 +72,8 @@ function transformFile (dag, opts) { const { value: node } = await dag.get(cid, { ...opts, preload: false }) yield { - hash, - path: path || hash, + cid, + path: path || cid.toString(), size: Buffer.isBuffer(node) ? 
node.length : node.size } } diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js index 9513a9a124..1f949fb7a0 100644 --- a/src/http/api/resources/files-regular.js +++ b/src/http/api/resources/files-regular.js @@ -225,7 +225,7 @@ exports.add = { for await (const file of source) { output.write(JSON.stringify({ Name: file.path, - Hash: cidToString(file.hash, { base: request.query['cid-base'] }), + Hash: cidToString(file.cid, { base: request.query['cid-base'] }), Size: file.size }) + '\n') } From 0921a828915d950fd1d7411e99f0d952c1c59ad8 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 13 Dec 2019 17:38:12 +0000 Subject: [PATCH 07/13] refactor: convert ping API to async/await (#2671) --- src/core/components/index.js | 1 + src/core/components/ping-pull-stream.js | 100 -------------------- src/core/components/ping-readable-stream.js | 7 -- src/core/components/ping.js | 52 +++++++--- src/core/components/start.js | 1 + 5 files changed, 41 insertions(+), 120 deletions(-) delete mode 100644 src/core/components/ping-pull-stream.js delete mode 100644 src/core/components/ping-readable-stream.js diff --git a/src/core/components/index.js b/src/core/components/index.js index 44d922712a..64afaa2b09 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -3,6 +3,7 @@ exports.add = require('./add') exports.config = require('./config') exports.init = require('./init') +exports.ping = require('./ping') exports.start = require('./start') exports.stop = require('./stop') diff --git a/src/core/components/ping-pull-stream.js b/src/core/components/ping-pull-stream.js deleted file mode 100644 index 838378bace..0000000000 --- a/src/core/components/ping-pull-stream.js +++ /dev/null @@ -1,100 +0,0 @@ -'use strict' - -const debug = require('debug') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const PeerId = require('peer-id') -const pull = require('pull-stream') -const Pushable = require('pull-pushable') - -const 
log = debug('ipfs:pingPullStream') -log.error = debug('ipfs:pingPullStream:error') - -module.exports = function pingPullStream (self) { - return (peerId, opts) => { - if (!self.isOnline()) { - return pull.error(new Error(OFFLINE_ERROR)) - } - - opts = Object.assign({ count: 10 }, opts) - - const source = Pushable() - - getPeer(self.libp2p, source, peerId, (err, peer) => { - if (err) { - log.error(err) - source.end(err) - return - } - - runPing(self.libp2p, source, opts.count, peer, (err) => { - if (err) { - log.error(err) - source.push(getPacket({ success: false, text: err.toString() })) - source.end() - } - }) - }) - - return source - } -} - -function getPacket (msg) { - // Default msg - const basePacket = { success: true, time: 0, text: '' } - return Object.assign(basePacket, msg) -} - -function getPeer (libp2pNode, statusStream, peerIdStr, cb) { - let peerId - - try { - peerId = PeerId.createFromB58String(peerIdStr) - } catch (err) { - return cb(err) - } - - let peerInfo - - try { - peerInfo = libp2pNode.peerBook.get(peerId) - } catch (err) { - log('Peer not found in peer book, trying peer routing') - - // Share lookup status just as in the go implemmentation - statusStream.push(getPacket({ text: `Looking up peer ${peerIdStr}` })) - return libp2pNode.peerRouting.findPeer(peerId, cb) - } - - cb(null, peerInfo) -} - -function runPing (libp2pNode, statusStream, count, peer, cb) { - libp2pNode.ping(peer, (err, p) => { - if (err) { return cb(err) } - - let packetCount = 0 - let totalTime = 0 - statusStream.push(getPacket({ text: `PING ${peer.id.toB58String()}` })) - - p.on('ping', (time) => { - statusStream.push(getPacket({ time })) - totalTime += time - packetCount++ - if (packetCount >= count) { - const average = totalTime / count - p.stop() - statusStream.push(getPacket({ text: `Average latency: ${average}ms` })) - statusStream.end() - } - }) - - p.on('error', (err) => { - log.error(err) - p.stop() - cb(err) - }) - - p.start() - }) -} diff --git 
a/src/core/components/ping-readable-stream.js b/src/core/components/ping-readable-stream.js deleted file mode 100644 index b6809ffb48..0000000000 --- a/src/core/components/ping-readable-stream.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const toStream = require('pull-stream-to-stream') - -module.exports = function pingReadableStream (self) { - return (peerId, opts) => toStream.source(self.pingPullStream(peerId, opts)) -} diff --git a/src/core/components/ping.js b/src/core/components/ping.js index 5f0aa61be3..efc0e1bc33 100644 --- a/src/core/components/ping.js +++ b/src/core/components/ping.js @@ -1,18 +1,44 @@ 'use strict' -const promisify = require('promisify-es6') -const pull = require('pull-stream/pull') -module.exports = function ping (self) { - return promisify((peerId, opts, callback) => { - if (typeof opts === 'function') { - callback = opts - opts = {} +const PeerId = require('peer-id') +const basePacket = { success: true, time: 0, text: '' } +module.exports = ({ libp2p }) => { + return async function * (peerId, options) { + options = options || {} + options.count = options.count || 10 + + if (!PeerId.isPeerId(peerId)) { + peerId = PeerId.createFromCID(peerId) } - pull( - self.pingPullStream(peerId, opts), - pull.collect(callback) - ) - }) + let peerInfo + if (libp2p.peerStore.has(peerId)) { + peerInfo = libp2p.peerStore.get(peerId) + } else { + yield { ...basePacket, text: `Looking up peer ${peerId}` } + peerInfo = await libp2p.peerRouting.findPeer(peerId) + } + + yield { ...basePacket, text: `PING ${peerInfo.id.toB58String()}` } + + let packetCount = 0 + let totalTime = 0 + + for (let i = 0; i < options.count; i++) { + try { + const time = await libp2p.ping(peerInfo) + totalTime += time + packetCount++ + yield { ...basePacket, time } + } catch (err) { + yield { ...basePacket, success: false, text: err.toString() } + } + } + + if (packetCount) { + const average = totalTime / packetCount + yield { ...basePacket, text: `Average latency: ${average}ms` } +
} + } } diff --git a/src/core/components/start.js b/src/core/components/start.js index f9f41c7458..bbae1cb271 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -133,6 +133,7 @@ function createApi ({ add, config: Commands.config({ repo }), init: () => { throw new AlreadyInitializedError() }, + ping: Commands.ping({ libp2p }), start: () => apiManager.api, stop } From 78f361ef3b5a3230b43634b7e7898d892b391dc7 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Fri, 13 Dec 2019 17:39:18 +0000 Subject: [PATCH 08/13] refactor: convert pubsub API to async/await (#2672) * refactor: convert pubsub API to async/await * refactor: switch wording to not enabled --- src/core/components/pubsub.js | 92 +++-------------------------------- src/core/components/start.js | 5 +- src/core/errors.js | 11 +++++ 3 files changed, 22 insertions(+), 86 deletions(-) diff --git a/src/core/components/pubsub.js b/src/core/components/pubsub.js index 8c5916b906..0954304f46 100644 --- a/src/core/components/pubsub.js +++ b/src/core/components/pubsub.js @@ -1,90 +1,12 @@ 'use strict' -const callbackify = require('callbackify') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const errcode = require('err-code') - -module.exports = function pubsub (self) { - function checkOnlineAndEnabled () { - if (!self.isOnline()) { - throw errcode(new Error(OFFLINE_ERROR), 'ERR_OFFLINE') - } - - if (!self.libp2p.pubsub) { - throw errcode(new Error('pubsub is not enabled'), 'ERR_PUBSUB_DISABLED') - } - } - +module.exports = ({ libp2p }) => { return { - subscribe: (topic, handler, options, callback) => { - if (typeof options === 'function') { - callback = options - options = {} - } - - if (typeof callback === 'function') { - try { - checkOnlineAndEnabled() - } catch (err) { - return callback(err) - } - - self.libp2p.pubsub.subscribe(topic, handler, options, callback) - return - } - - try { - checkOnlineAndEnabled() - } catch (err) { - return Promise.reject(err) - } - - return 
self.libp2p.pubsub.subscribe(topic, handler, options) - }, - - unsubscribe: (topic, handler, callback) => { - if (typeof callback === 'function') { - try { - checkOnlineAndEnabled() - } catch (err) { - return callback(err) - } - - self.libp2p.pubsub.unsubscribe(topic, handler, callback) - return - } - - try { - checkOnlineAndEnabled() - } catch (err) { - return Promise.reject(err) - } - - return self.libp2p.pubsub.unsubscribe(topic, handler) - }, - - publish: callbackify(async (topic, data) => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - await self.libp2p.pubsub.publish(topic, data) - }), - - ls: callbackify(async () => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - return self.libp2p.pubsub.ls() - }), - - peers: callbackify(async (topic) => { // eslint-disable-line require-await - checkOnlineAndEnabled() - - return self.libp2p.pubsub.peers(topic) - }), - - setMaxListeners (n) { - checkOnlineAndEnabled() - - self.libp2p.pubsub.setMaxListeners(n) - } + subscribe: (...args) => libp2p.pubsub.subscribe(...args), + unsubscribe: (...args) => libp2p.pubsub.unsubscribe(...args), + publish: (...args) => libp2p.pubsub.publish(...args), + ls: (...args) => libp2p.pubsub.getTopics(...args), + peers: (...args) => libp2p.pubsub.getSubscribers(...args), + setMaxListeners: (n) => libp2p.pubsub.setMaxListeners(n) } } diff --git a/src/core/components/start.js b/src/core/components/start.js index bbae1cb271..214563b53e 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -5,7 +5,7 @@ const PeerBook = require('peer-book') const IPNS = require('../ipns') const routingConfig = require('../ipns/routing/config') const defer = require('p-defer') -const { AlreadyInitializedError } = require('../errors') +const { AlreadyInitializedError, NotEnabledError } = require('../errors') const Commands = require('./') module.exports = ({ @@ -134,6 +134,9 @@ function createApi ({ config: Commands.config({ repo }), init: () => { 
throw new AlreadyInitializedError() }, ping: Commands.ping({ libp2p }), + pubsub: libp2p.pubsub + ? Commands.pubsub({ libp2p }) + : () => { throw new NotEnabledError('pubsub not enabled') }, start: () => apiManager.api, stop } diff --git a/src/core/errors.js b/src/core/errors.js index a2ab2542a5..c7cf7ac938 100644 --- a/src/core/errors.js +++ b/src/core/errors.js @@ -41,3 +41,14 @@ class NotStartedError extends Error { NotStartedError.code = 'ERR_NOT_STARTED' exports.NotStartedError = NotStartedError + +class NotEnabledError extends Error { + constructor (message = 'not enabled') { + super(message) + this.name = 'NotEnabledError' + this.code = NotEnabledError.code + } +} + +NotEnabledError.code = 'ERR_NOT_ENABLED' +exports.NotEnabledError = NotEnabledError From 1449103430a7ba302150932665e64d8bbd9d7ff9 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 16 Dec 2019 17:01:33 +0000 Subject: [PATCH 09/13] refactor: convert bitswap API to async/await (#2659) --- src/core/components/bitswap.js | 72 ------------------------- src/core/components/bitswap/stat.js | 25 +++++++++ src/core/components/bitswap/unwant.js | 20 +++++++ src/core/components/bitswap/wantlist.js | 17 ++++++ src/core/components/index.js | 5 ++ src/core/components/start.js | 5 ++ 6 files changed, 72 insertions(+), 72 deletions(-) delete mode 100644 src/core/components/bitswap.js create mode 100644 src/core/components/bitswap/stat.js create mode 100644 src/core/components/bitswap/unwant.js create mode 100644 src/core/components/bitswap/wantlist.js diff --git a/src/core/components/bitswap.js b/src/core/components/bitswap.js deleted file mode 100644 index 654f9f045b..0000000000 --- a/src/core/components/bitswap.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR -const callbackify = require('callbackify') -const Big = require('bignumber.js') -const CID = require('cids') -const PeerId = require('peer-id') -const errCode = require('err-code') - -function 
formatWantlist (list, cidBase) { - return Array.from(list).map((e) => ({ '/': e[1].cid.toBaseEncodedString(cidBase) })) -} - -module.exports = function bitswap (self) { - return { - wantlist: callbackify.variadic(async (peerId) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - let list - - if (peerId) { - peerId = PeerId.createFromB58String(peerId) - - list = self._bitswap.wantlistForPeer(peerId) - } else { - list = self._bitswap.getWantlist() - } - - return { Keys: formatWantlist(list) } - }), - - stat: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const snapshot = self._bitswap.stat().snapshot - - return { - provideBufLen: parseInt(snapshot.providesBufferLength.toString()), - blocksReceived: new Big(snapshot.blocksReceived), - wantlist: formatWantlist(self._bitswap.getWantlist()), - peers: self._bitswap.peers().map((id) => id.toB58String()), - dupBlksReceived: new Big(snapshot.dupBlksReceived), - dupDataReceived: new Big(snapshot.dupDataReceived), - dataReceived: new Big(snapshot.dataReceived), - blocksSent: new Big(snapshot.blocksSent), - dataSent: new Big(snapshot.dataSent) - } - }), - - unwant: callbackify(async (keys) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - if (!Array.isArray(keys)) { - keys = [keys] - } - - try { - keys = keys.map((key) => new CID(key)) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - return self._bitswap.unwant(keys) - }) - } -} diff --git a/src/core/components/bitswap/stat.js b/src/core/components/bitswap/stat.js new file mode 100644 index 0000000000..b2b609d44d --- /dev/null +++ b/src/core/components/bitswap/stat.js @@ -0,0 +1,25 @@ +'use strict' + +const Big = require('bignumber.js') + +function formatWantlist (list) { + return Array.from(list).map((e) => ({ '/': e[1].cid.toString() })) +} + +module.exports = ({ 
bitswap }) => { + return async function stat () { // eslint-disable-line require-await + const snapshot = bitswap.stat().snapshot + + return { + provideBufLen: parseInt(snapshot.providesBufferLength.toString()), + blocksReceived: new Big(snapshot.blocksReceived), + wantlist: formatWantlist(bitswap.getWantlist()), + peers: bitswap.peers().map((id) => id.toB58String()), + dupBlksReceived: new Big(snapshot.dupBlksReceived), + dupDataReceived: new Big(snapshot.dupDataReceived), + dataReceived: new Big(snapshot.dataReceived), + blocksSent: new Big(snapshot.blocksSent), + dataSent: new Big(snapshot.dataSent) + } + } +} diff --git a/src/core/components/bitswap/unwant.js b/src/core/components/bitswap/unwant.js new file mode 100644 index 0000000000..9f71172ef4 --- /dev/null +++ b/src/core/components/bitswap/unwant.js @@ -0,0 +1,20 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +module.exports = ({ bitswap }) => { + return async function unwant (keys) { // eslint-disable-line require-await + if (!Array.isArray(keys)) { + keys = [keys] + } + + try { + keys = keys.map((key) => new CID(key)) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + return bitswap.unwant(keys) + } +} diff --git a/src/core/components/bitswap/wantlist.js b/src/core/components/bitswap/wantlist.js new file mode 100644 index 0000000000..02c882abc6 --- /dev/null +++ b/src/core/components/bitswap/wantlist.js @@ -0,0 +1,17 @@ +'use strict' + +const PeerId = require('peer-id') + +function formatWantlist (list) { + return Array.from(list).map((e) => ({ '/': e[1].cid.toString() })) +} + +module.exports = ({ bitswap }) => { + return async function wantlist (peerId) { // eslint-disable-line require-await + const list = peerId + ? 
bitswap.wantlistForPeer(PeerId.createFromCID(peerId)) + : bitswap.getWantlist() + + return { Keys: formatWantlist(list) } + } +} diff --git a/src/core/components/index.js b/src/core/components/index.js index 64afaa2b09..e9cfcbb92c 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -1,6 +1,11 @@ 'use strict' exports.add = require('./add') +exports.bitswap = { + stat: require('./bitswap/stat'), + unwant: require('./bitswap/unwant'), + wantlist: require('./bitswap/wantlist') +} exports.config = require('./config') exports.init = require('./init') exports.ping = require('./ping') diff --git a/src/core/components/start.js b/src/core/components/start.js index 214563b53e..ddb3a9b4b1 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -131,6 +131,11 @@ function createApi ({ const api = { add, + bitswap: { + stat: Commands.bitswap.stat({ bitswap }), + unwant: Commands.bitswap.unwant({ bitswap }), + wantlist: Commands.bitswap.wantlist({ bitswap }) + }, config: Commands.config({ repo }), init: () => { throw new AlreadyInitializedError() }, ping: Commands.ping({ libp2p }), From 20cbd6defe022344000c6d301908fc68e671b9e8 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 16 Dec 2019 17:06:06 +0000 Subject: [PATCH 10/13] refactor: convert id API to async/await (#2665) --- src/core/components/id.js | 13 ++++++------- src/core/components/index.js | 1 + src/core/components/init.js | 1 + src/core/components/start.js | 1 + src/core/components/stop.js | 1 + 5 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/core/components/id.js b/src/core/components/id.js index a8fd75f92d..f848d96e8f 100644 --- a/src/core/components/id.js +++ b/src/core/components/id.js @@ -1,14 +1,13 @@ 'use strict' -const callbackify = require('callbackify') const pkgversion = require('../../../package.json').version -module.exports = function id (self) { - return callbackify(async () => { // eslint-disable-line require-await +module.exports = 
({ peerInfo }) => { + return async function id () { // eslint-disable-line require-await return { - id: self._peerInfo.id.toB58String(), - publicKey: self._peerInfo.id.pubKey.bytes.toString('base64'), - addresses: self._peerInfo.multiaddrs + id: peerInfo.id.toB58String(), + publicKey: peerInfo.id.pubKey.bytes.toString('base64'), + addresses: peerInfo.multiaddrs .toArray() .map((ma) => ma.toString()) .filter((ma) => ma.indexOf('ipfs') >= 0) @@ -16,5 +15,5 @@ module.exports = function id (self) { agentVersion: `js-ipfs/${pkgversion}`, protocolVersion: '9000' } - }) + } } diff --git a/src/core/components/index.js b/src/core/components/index.js index e9cfcbb92c..7fcdaf831d 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -7,6 +7,7 @@ exports.bitswap = { wantlist: require('./bitswap/wantlist') } exports.config = require('./config') +exports.id = require('./id') exports.init = require('./init') exports.ping = require('./ping') exports.start = require('./start') diff --git a/src/core/components/init.js b/src/core/components/init.js index 089d6148dc..09201762d9 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -300,6 +300,7 @@ function createApi ({ const api = { add, config: Commands.config({ repo }), + id: Commands.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, start } diff --git a/src/core/components/start.js b/src/core/components/start.js index ddb3a9b4b1..e0437e9171 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -137,6 +137,7 @@ function createApi ({ wantlist: Commands.bitswap.wantlist({ bitswap }) }, config: Commands.config({ repo }), + id: Commands.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, ping: Commands.ping({ libp2p }), pubsub: libp2p.pubsub diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 4e2a9bb036..5e48a9cb1d 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -99,6 
+99,7 @@ function createApi ({ const api = { add, config: Commands.config({ repo }), + id: Commands.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, start, stop: () => apiManager.api From ed5e9ef1cfc3cd019e4ceeb0ecd01162ce0d8078 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 16 Dec 2019 17:07:21 +0000 Subject: [PATCH 11/13] refactor: convert version API to async/await (#2679) --- src/core/components/index.js | 1 + src/core/components/init.js | 3 ++- src/core/components/start.js | 3 ++- src/core/components/stop.js | 3 ++- src/core/components/version.js | 9 ++++----- 5 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/core/components/index.js b/src/core/components/index.js index 7fcdaf831d..e664e55c55 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -12,6 +12,7 @@ exports.init = require('./init') exports.ping = require('./ping') exports.start = require('./start') exports.stop = require('./stop') +exports.version = require('./version') exports.legacy = { // TODO: these will be removed as the new API is completed dag: require('./dag'), diff --git a/src/core/components/init.js b/src/core/components/init.js index 09201762d9..642b9e63db 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -302,7 +302,8 @@ function createApi ({ config: Commands.config({ repo }), id: Commands.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, - start + start, + version: Commands.version({ repo }) } return api diff --git a/src/core/components/start.js b/src/core/components/start.js index e0437e9171..0fb44f22aa 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -144,7 +144,8 @@ function createApi ({ ? 
Commands.pubsub({ libp2p }) : () => { throw new NotEnabledError('pubsub not enabled') }, start: () => apiManager.api, - stop + stop, + version: Commands.version({ repo }) } return api diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 5e48a9cb1d..1e70484005 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -102,7 +102,8 @@ function createApi ({ id: Commands.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, start, - stop: () => apiManager.api + stop: () => apiManager.api, + version: Commands.version({ repo }) } return api diff --git a/src/core/components/version.js b/src/core/components/version.js index cc850c465d..7b9d963b14 100644 --- a/src/core/components/version.js +++ b/src/core/components/version.js @@ -1,17 +1,16 @@ 'use strict' const pkg = require('../../../package.json') -const callbackify = require('callbackify') // TODO add the commit hash of the current ipfs version to the response. -module.exports = function version (self) { - return callbackify(async () => { - const repoVersion = await self.repo.version() +module.exports = ({ repo }) => { + return async function version () { + const repoVersion = await repo.version() return { version: pkg.version, repo: repoVersion, commit: '' } - }) + } } From 920e399bd838b63dfe8e9de5fbf980d42b05c27d Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 16 Dec 2019 21:31:50 +0000 Subject: [PATCH 12/13] refactor: convert object API to async/await (#2666) * refactor: convert object API to async/await * fix: object export --- src/core/components/index.js | 15 +- src/core/components/init.js | 18 +- src/core/components/object.js | 302 ------------------ src/core/components/object/data.js | 9 + src/core/components/object/get.js | 48 +++ src/core/components/object/links.js | 58 ++++ src/core/components/object/new.js | 43 +++ src/core/components/object/patch/add-link.js | 12 + .../components/object/patch/append-data.js | 14 + 
src/core/components/object/patch/rm-link.js | 12 + src/core/components/object/patch/set-data.js | 13 + src/core/components/object/put.js | 85 +++++ src/core/components/object/stat.js | 28 ++ src/core/components/start.js | 16 +- src/core/components/stop.js | 16 +- 15 files changed, 383 insertions(+), 306 deletions(-) delete mode 100644 src/core/components/object.js create mode 100644 src/core/components/object/data.js create mode 100644 src/core/components/object/get.js create mode 100644 src/core/components/object/links.js create mode 100644 src/core/components/object/new.js create mode 100644 src/core/components/object/patch/add-link.js create mode 100644 src/core/components/object/patch/append-data.js create mode 100644 src/core/components/object/patch/rm-link.js create mode 100644 src/core/components/object/patch/set-data.js create mode 100644 src/core/components/object/put.js create mode 100644 src/core/components/object/stat.js diff --git a/src/core/components/index.js b/src/core/components/index.js index e664e55c55..1abb7f4499 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -9,6 +9,20 @@ exports.bitswap = { exports.config = require('./config') exports.id = require('./id') exports.init = require('./init') +exports.object = { + data: require('./object/data'), + get: require('./object/get'), + links: require('./object/links'), + new: require('./object/new'), + patch: { + addLink: require('./object/patch/add-link'), + appendData: require('./object/patch/append-data'), + rmLink: require('./object/patch/rm-link'), + setData: require('./object/patch/set-data') + }, + put: require('./object/put'), + stat: require('./object/stat') +} exports.ping = require('./ping') exports.start = require('./start') exports.stop = require('./stop') @@ -17,6 +31,5 @@ exports.version = require('./version') exports.legacy = { // TODO: these will be removed as the new API is completed dag: require('./dag'), libp2p: require('./libp2p'), - object: 
require('./object'), pin: require('./pin') } diff --git a/src/core/components/init.js b/src/core/components/init.js index 642b9e63db..e5b6788f63 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -102,7 +102,20 @@ module.exports = ({ }) const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) - const object = Commands.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const object = { + data: Commands.object.data({ ipld, preload }), + get: Commands.object.get({ ipld, preload }), + links: Commands.object.links({ dag }), + new: Commands.object.new({ ipld, preload }), + patch: { + addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Commands.object.put({ ipld, gcLock, preload }), + stat: Commands.object.stat({ ipld, preload }) + } const pinManager = new PinManager(repo, dag) await pinManager.load() @@ -134,6 +147,7 @@ module.exports = ({ initOptions: options, ipld, keychain, + object, peerInfo, pinManager, preload, @@ -276,6 +290,7 @@ function createApi ({ initOptions, ipld, keychain, + object, peerInfo, pinManager, preload, @@ -302,6 +317,7 @@ function createApi ({ config: Commands.config({ repo }), id: Commands.id({ peerInfo }), init: () => { throw new AlreadyInitializedError() }, + object, start, version: Commands.version({ repo }) } diff --git a/src/core/components/object.js b/src/core/components/object.js deleted file mode 100644 index 1f7e3f7cbe..0000000000 --- a/src/core/components/object.js +++ /dev/null @@ -1,302 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const DAGLink = dagPB.DAGLink -const CID = require('cids') -const mh = require('multihashes') -const multicodec = 
require('multicodec') -const Unixfs = require('ipfs-unixfs') -const errCode = require('err-code') - -function normalizeMultihash (multihash, enc) { - if (typeof multihash === 'string') { - if (enc === 'base58' || !enc) { - return multihash - } - - return Buffer.from(multihash, enc) - } else if (Buffer.isBuffer(multihash)) { - return multihash - } else if (CID.isCID(multihash)) { - return multihash.buffer - } else { - throw new Error('unsupported multihash') - } -} - -function parseBuffer (buf, encoding) { - switch (encoding) { - case 'json': - return parseJSONBuffer(buf) - case 'protobuf': - return parseProtoBuffer(buf) - default: - throw new Error(`unkown encoding: ${encoding}`) - } -} - -function parseJSONBuffer (buf) { - let data - let links - - try { - const parsed = JSON.parse(buf.toString()) - - links = (parsed.Links || []).map((link) => { - return new DAGLink( - link.Name || link.name, - link.Size || link.size, - mh.fromB58String(link.Hash || link.hash || link.multihash) - ) - }) - data = Buffer.from(parsed.Data) - } catch (err) { - throw new Error('failed to parse JSON: ' + err) - } - - return new DAGNode(data, links) -} - -function parseProtoBuffer (buf) { - return dagPB.util.deserialize(buf) -} - -function findLinks (node, links = []) { - for (const key in node) { - const val = node[key] - - if (key === '/' && Object.keys(node).length === 1) { - try { - links.push(new DAGLink('', 0, new CID(val))) - continue - } catch (_) { - // not a CID - } - } - - if (CID.isCID(val)) { - links.push(new DAGLink('', 0, val)) - - continue - } - - if (Array.isArray(val)) { - findLinks(val, links) - } - - if (typeof val === 'object' && !(val instanceof String)) { - findLinks(val, links) - } - } - - return links -} - -module.exports = function object (self) { - async function editAndSave (multihash, edit, options) { - options = options || {} - - const node = await self.object.get(multihash, options) - - // edit applies the edit func passed to - // editAndSave - const cid = 
await self._ipld.put(edit(node), multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } - - return { - new: callbackify.variadic(async (template, options) => { - options = options || {} - - // allow options in the template position - if (template && typeof template !== 'string') { - options = template - template = null - } - - let data - - if (template) { - if (template === 'unixfs-dir') { - data = (new Unixfs('directory')).marshal() - } else { - throw new Error('unknown template') - } - } else { - data = Buffer.alloc(0) - } - - const node = new DAGNode(data) - - const cid = await self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - }), - put: callbackify.variadic(async (obj, options) => { - options = options || {} - - const encoding = options.enc - let node - - if (Buffer.isBuffer(obj)) { - if (encoding) { - node = await parseBuffer(obj, encoding) - } else { - node = new DAGNode(obj) - } - } else if (DAGNode.isDAGNode(obj)) { - // already a dag node - node = obj - } else if (typeof obj === 'object') { - node = new DAGNode(obj.Data, obj.Links) - } else { - throw new Error('obj not recognized') - } - - const release = await self._gcLock.readLock() - - try { - const cid = await self._ipld.put(node, multicodec.DAG_PB, { - cidVersion: 0, - hashAlg: multicodec.SHA2_256 - }) - - if (options.preload !== false) { - self._preload(cid) - } - - return cid - } finally { - release() - } - }), - - get: callbackify.variadic(async (multihash, options) => { // eslint-disable-line require-await - options = options || {} - - let mh, cid - - try { - mh = normalizeMultihash(multihash, options.enc) - } catch (err) { - throw errCode(err, 'ERR_INVALID_MULTIHASH') - } - - try { - cid = new CID(mh) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - - if 
(options.cidVersion === 1) { - cid = cid.toV1() - } - - if (options.preload !== false) { - self._preload(cid) - } - - return self._ipld.get(cid) - }), - - data: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const node = await self.object.get(multihash, options) - - return node.Data - }), - - links: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const cid = new CID(multihash) - const result = await self.dag.get(cid, options) - - if (cid.codec === 'raw') { - return [] - } - - if (cid.codec === 'dag-pb') { - return result.value.Links - } - - if (cid.codec === 'dag-cbor') { - return findLinks(result) - } - - throw new Error(`Cannot resolve links from codec ${cid.codec}`) - }), - - stat: callbackify.variadic(async (multihash, options) => { - options = options || {} - - const node = await self.object.get(multihash, options) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { - cidVersion: 0 - }) - - const blockSize = serialized.length - const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0) - - return { - Hash: cid.toBaseEncodedString(), - NumLinks: node.Links.length, - BlockSize: blockSize, - LinksSize: blockSize - node.Data.length, - DataSize: node.Data.length, - CumulativeSize: blockSize + linkLength - } - }), - - patch: { - addLink: callbackify.variadic(async (multihash, link, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - node.addLink(link) - - return node - }, options) - }), - - rmLink: callbackify.variadic(async (multihash, linkRef, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - node.rmLink(linkRef.Name || linkRef.name) - - return node - }, options) - }), - - appendData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - const newData = 
Buffer.concat([node.Data, data]) - - return new DAGNode(newData, node.Links) - }, options) - }), - - setData: callbackify.variadic(async (multihash, data, options) => { // eslint-disable-line require-await - return editAndSave(multihash, (node) => { - return new DAGNode(data, node.Links) - }, options) - }) - } - } -} diff --git a/src/core/components/object/data.js b/src/core/components/object/data.js new file mode 100644 index 0000000000..e7066f3d74 --- /dev/null +++ b/src/core/components/object/data.js @@ -0,0 +1,9 @@ +'use strict' + +module.exports = ({ ipld, preload }) => { + const get = require('./get')({ ipld, preload }) + return async function data (multihash, options) { + const node = await get(multihash, options) + return node.Data + } +} diff --git a/src/core/components/object/get.js b/src/core/components/object/get.js new file mode 100644 index 0000000000..394235cc6c --- /dev/null +++ b/src/core/components/object/get.js @@ -0,0 +1,48 @@ +'use strict' + +const CID = require('cids') +const errCode = require('err-code') + +function normalizeMultihash (multihash, enc) { + if (typeof multihash === 'string') { + if (enc === 'base58' || !enc) { + return multihash + } + return Buffer.from(multihash, enc) + } else if (Buffer.isBuffer(multihash)) { + return multihash + } else if (CID.isCID(multihash)) { + return multihash.buffer + } + throw new Error('unsupported multihash') +} + +module.exports = ({ ipld, preload }) => { + return async function get (multihash, options) { // eslint-disable-line require-await + options = options || {} + + let mh, cid + + try { + mh = normalizeMultihash(multihash, options.enc) + } catch (err) { + throw errCode(err, 'ERR_INVALID_MULTIHASH') + } + + try { + cid = new CID(mh) + } catch (err) { + throw errCode(err, 'ERR_INVALID_CID') + } + + if (options.cidVersion === 1) { + cid = cid.toV1() + } + + if (options.preload !== false) { + preload(cid) + } + + return ipld.get(cid) + } +} diff --git a/src/core/components/object/links.js 
b/src/core/components/object/links.js new file mode 100644 index 0000000000..8e6a58f177 --- /dev/null +++ b/src/core/components/object/links.js @@ -0,0 +1,58 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGLink = dagPB.DAGLink +const CID = require('cids') + +function findLinks (node, links = []) { + for (const key in node) { + const val = node[key] + + if (key === '/' && Object.keys(node).length === 1) { + try { + links.push(new DAGLink('', 0, new CID(val))) + continue + } catch (_) { + // not a CID + } + } + + if (CID.isCID(val)) { + links.push(new DAGLink('', 0, val)) + continue + } + + if (Array.isArray(val)) { + findLinks(val, links) + } + + if (val && typeof val === 'object') { + findLinks(val, links) + } + } + + return links +} + +module.exports = ({ dag }) => { + return async function links (multihash, options) { + options = options || {} + + const cid = new CID(multihash) + const result = await dag.get(cid, options) + + if (cid.codec === 'raw') { + return [] + } + + if (cid.codec === 'dag-pb') { + return result.value.Links + } + + if (cid.codec === 'dag-cbor') { + return findLinks(result) + } + + throw new Error(`Cannot resolve links from codec ${cid.codec}`) + } +} diff --git a/src/core/components/object/new.js b/src/core/components/object/new.js new file mode 100644 index 0000000000..4d6e6291b0 --- /dev/null +++ b/src/core/components/object/new.js @@ -0,0 +1,43 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const multicodec = require('multicodec') +const Unixfs = require('ipfs-unixfs') + +module.exports = ({ ipld, preload }) => { + return async function _new (template, options) { + options = options || {} + + // allow options in the template position + if (template && typeof template !== 'string') { + options = template + template = null + } + + let data + + if (template) { + if (template === 'unixfs-dir') { + data = (new Unixfs('directory')).marshal() + } else { + throw new Error('unknown 
template') + } + } else { + data = Buffer.alloc(0) + } + + const node = new DAGNode(data) + + const cid = await ipld.put(node, multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + preload(cid) + } + + return cid + } +} diff --git a/src/core/components/object/patch/add-link.js b/src/core/components/object/patch/add-link.js new file mode 100644 index 0000000000..2cdd990749 --- /dev/null +++ b/src/core/components/object/patch/add-link.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function addLink (multihash, link, options) { + const node = await get(multihash, options) + node.addLink(link) + return put(node, options) + } +} diff --git a/src/core/components/object/patch/append-data.js b/src/core/components/object/patch/append-data.js new file mode 100644 index 0000000000..511d79feb3 --- /dev/null +++ b/src/core/components/object/patch/append-data.js @@ -0,0 +1,14 @@ +'use strict' + +const { DAGNode } = require('ipld-dag-pb') + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function appendData (multihash, data, options) { + const node = await get(multihash, options) + const newData = Buffer.concat([node.Data, data]) + return put(new DAGNode(newData, node.Links), options) + } +} diff --git a/src/core/components/object/patch/rm-link.js b/src/core/components/object/patch/rm-link.js new file mode 100644 index 0000000000..bd3033a06b --- /dev/null +++ b/src/core/components/object/patch/rm-link.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function rmLink 
(multihash, linkRef, options) { + const node = await get(multihash, options) + node.rmLink(linkRef.Name || linkRef.name) + return put(node, options) + } +} diff --git a/src/core/components/object/patch/set-data.js b/src/core/components/object/patch/set-data.js new file mode 100644 index 0000000000..7693a5b5ba --- /dev/null +++ b/src/core/components/object/patch/set-data.js @@ -0,0 +1,13 @@ +'use strict' + +const { DAGNode } = require('ipld-dag-pb') + +module.exports = ({ ipld, gcLock, preload }) => { + const get = require('../get')({ ipld, preload }) + const put = require('../put')({ ipld, gcLock, preload }) + + return async function setData (multihash, data, options) { + const node = await get(multihash, options) + return put(new DAGNode(data, node.Links), options) + } +} diff --git a/src/core/components/object/put.js b/src/core/components/object/put.js new file mode 100644 index 0000000000..2a8a195f53 --- /dev/null +++ b/src/core/components/object/put.js @@ -0,0 +1,85 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const DAGLink = dagPB.DAGLink +const mh = require('multihashes') +const multicodec = require('multicodec') + +function parseBuffer (buf, encoding) { + switch (encoding) { + case 'json': + return parseJSONBuffer(buf) + case 'protobuf': + return parseProtoBuffer(buf) + default: + throw new Error(`unkown encoding: ${encoding}`) + } +} + +function parseJSONBuffer (buf) { + let data + let links + + try { + const parsed = JSON.parse(buf.toString()) + + links = (parsed.Links || []).map((link) => { + return new DAGLink( + link.Name || link.name, + link.Size || link.size, + mh.fromB58String(link.Hash || link.hash || link.multihash) + ) + }) + data = Buffer.from(parsed.Data) + } catch (err) { + throw new Error('failed to parse JSON: ' + err) + } + + return new DAGNode(data, links) +} + +function parseProtoBuffer (buf) { + return dagPB.util.deserialize(buf) +} + +module.exports = ({ ipld, gcLock, preload }) => { + return 
async function put (obj, options) { + options = options || {} + + const encoding = options.enc + let node + + if (Buffer.isBuffer(obj)) { + if (encoding) { + node = await parseBuffer(obj, encoding) + } else { + node = new DAGNode(obj) + } + } else if (DAGNode.isDAGNode(obj)) { + // already a dag node + node = obj + } else if (typeof obj === 'object') { + node = new DAGNode(obj.Data, obj.Links) + } else { + throw new Error('obj not recognized') + } + + const release = await gcLock.readLock() + + try { + const cid = await ipld.put(node, multicodec.DAG_PB, { + cidVersion: 0, + hashAlg: multicodec.SHA2_256 + }) + + if (options.preload !== false) { + preload(cid) + } + + return cid + } finally { + release() + } + } +} diff --git a/src/core/components/object/stat.js b/src/core/components/object/stat.js new file mode 100644 index 0000000000..ea2f06c72c --- /dev/null +++ b/src/core/components/object/stat.js @@ -0,0 +1,28 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') + +module.exports = ({ ipld, preload }) => { + const get = require('./get')({ ipld, preload }) + return async function stat (multihash, options) { + options = options || {} + + const node = await get(multihash, options) + const serialized = dagPB.util.serialize(node) + const cid = await dagPB.util.cid(serialized, { + cidVersion: 0 + }) + + const blockSize = serialized.length + const linkLength = node.Links.reduce((a, l) => a + l.Tsize, 0) + + return { + Hash: cid.toBaseEncodedString(), + NumLinks: node.Links.length, + BlockSize: blockSize, + LinksSize: blockSize - node.Data.length, + DataSize: node.Data.length, + CumulativeSize: blockSize + linkLength + } + } +} diff --git a/src/core/components/start.js b/src/core/components/start.js index 0fb44f22aa..5234820fa8 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -108,7 +108,20 @@ function createApi ({ repo }) { const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) - const object = Commands.legacy.object({ 
_ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const object = { + data: Commands.object.data({ ipld, preload }), + get: Commands.object.get({ ipld, preload }), + links: Commands.object.links({ dag }), + new: Commands.object.new({ ipld, preload }), + patch: { + addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Commands.object.put({ ipld, gcLock, preload }), + stat: Commands.object.stat({ ipld, preload }) + } const pin = Commands.legacy.pin({ _ipld: ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) @@ -123,6 +136,7 @@ function createApi ({ ipns, keychain, libp2p, + object, peerInfo, preload, print, diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 1e70484005..9be69ec1d6 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -77,7 +77,20 @@ function createApi ({ repo }) { const dag = Commands.legacy.dag({ _ipld: ipld, _preload: preload }) - const object = Commands.legacy.object({ _ipld: ipld, _preload: preload, dag, _gcLock: gcLock }) + const object = { + data: Commands.object.data({ ipld, preload }), + get: Commands.object.get({ ipld, preload }), + links: Commands.object.links({ dag }), + new: Commands.object.new({ ipld, preload }), + patch: { + addLink: Commands.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Commands.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Commands.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Commands.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Commands.object.put({ ipld, gcLock, preload }), + stat: Commands.object.stat({ ipld, preload }) + } const pin = Commands.legacy.pin({ _ipld: 
ipld, _preload: preload, object, _repo: repo, _pinManager: pinManager }) const add = Commands.add({ ipld, dag, preload, pin, gcLock, constructorOptions }) @@ -89,6 +102,7 @@ function createApi ({ initOptions, ipld, keychain, + object, peerInfo, pinManager, preload, From 7e932b1e95ef81e970d649c5badd76400bcffa18 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 16 Dec 2019 23:07:18 +0000 Subject: [PATCH 13/13] refactor: convert swarm API to async/await (#2681) * refactor: convert swarm API to async/await * refactor: use libp2p.connections --- src/core/components/index.js | 7 +++ src/core/components/init.js | 7 +++ src/core/components/start.js | 7 +++ src/core/components/stop.js | 7 +++ src/core/components/swarm.js | 79 ------------------------ src/core/components/swarm/addrs.js | 13 ++++ src/core/components/swarm/connect.js | 7 +++ src/core/components/swarm/disconnect.js | 7 +++ src/core/components/swarm/local-addrs.js | 7 +++ src/core/components/swarm/peers.js | 34 ++++++++++ 10 files changed, 96 insertions(+), 79 deletions(-) delete mode 100644 src/core/components/swarm.js create mode 100644 src/core/components/swarm/addrs.js create mode 100644 src/core/components/swarm/connect.js create mode 100644 src/core/components/swarm/disconnect.js create mode 100644 src/core/components/swarm/local-addrs.js create mode 100644 src/core/components/swarm/peers.js diff --git a/src/core/components/index.js b/src/core/components/index.js index 1abb7f4499..4a9bcab492 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -26,6 +26,13 @@ exports.object = { exports.ping = require('./ping') exports.start = require('./start') exports.stop = require('./stop') +exports.swarm = { + addrs: require('./swarm/addrs'), + connect: require('./swarm/connect'), + disconnect: require('./swarm/disconnect'), + localAddrs: require('./swarm/local-addrs'), + peers: require('./swarm/peers') +} exports.version = require('./version') exports.legacy = { // TODO: these will be
removed as the new API is completed diff --git a/src/core/components/init.js b/src/core/components/init.js index e5b6788f63..91d457d7db 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -319,6 +319,13 @@ function createApi ({ init: () => { throw new AlreadyInitializedError() }, object, start, + swarm: { + addrs: () => { throw new NotStartedError() }, + connect: () => { throw new NotStartedError() }, + disconnect: () => { throw new NotStartedError() }, + localAddrs: Commands.swarm.localAddrs({ peerInfo }), + peers: () => { throw new NotStartedError() } + }, version: Commands.version({ repo }) } diff --git a/src/core/components/start.js b/src/core/components/start.js index 5234820fa8..2465be61f7 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -159,6 +159,13 @@ function createApi ({ : () => { throw new NotEnabledError('pubsub not enabled') }, start: () => apiManager.api, stop, + swarm: { + addrs: Commands.swarm.addrs({ libp2p }), + connect: Commands.swarm.connect({ libp2p }), + disconnect: Commands.swarm.disconnect({ libp2p }), + localAddrs: Commands.swarm.localAddrs({ peerInfo }), + peers: Commands.swarm.peers({ libp2p }) + }, version: Commands.version({ repo }) } diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 9be69ec1d6..3bd422f297 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -117,6 +117,13 @@ function createApi ({ init: () => { throw new AlreadyInitializedError() }, start, stop: () => apiManager.api, + swarm: { + addrs: () => { throw new NotStartedError() }, + connect: () => { throw new NotStartedError() }, + disconnect: () => { throw new NotStartedError() }, + localAddrs: Commands.swarm.localAddrs({ peerInfo }), + peers: () => { throw new NotStartedError() } + }, version: Commands.version({ repo }) } diff --git a/src/core/components/swarm.js b/src/core/components/swarm.js deleted file mode 100644 index
45d1b8ebe5..0000000000 --- a/src/core/components/swarm.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict' - -const callbackify = require('callbackify') -const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR - -module.exports = function swarm (self) { - return { - peers: callbackify.variadic(async (opts) => { // eslint-disable-line require-await - opts = opts || {} - - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const verbose = opts.v || opts.verbose - // TODO: return latency and streams when verbose is set - // we currently don't have this information - - const peers = [] - - Object.values(self._peerInfoBook.getAll()).forEach((peer) => { - const connectedAddr = peer.isConnected() - - if (!connectedAddr) { return } - - const tupple = { - addr: connectedAddr, - peer: peer.id - } - if (verbose) { - tupple.latency = 'n/a' - } - - peers.push(tupple) - }) - - return peers - }), - - // all the addrs we know - addrs: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - const peers = Object.values(self._peerInfoBook.getAll()) - - return peers - }), - - localAddrs: callbackify(async () => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.peerInfo.multiaddrs.toArray() - }), - - connect: callbackify(async (maddr) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.dial(maddr) - }), - - disconnect: callbackify(async (maddr) => { // eslint-disable-line require-await - if (!self.isOnline()) { - throw new Error(OFFLINE_ERROR) - } - - return self.libp2p.hangUp(maddr) - }), - - filters: callbackify(async () => { // eslint-disable-line require-await - throw new Error('Not implemented') - }) - } -} diff --git a/src/core/components/swarm/addrs.js b/src/core/components/swarm/addrs.js new file mode 100644 index 0000000000..ee095be07a --- 
/dev/null +++ b/src/core/components/swarm/addrs.js @@ -0,0 +1,13 @@ +'use strict' + +const CID = require('cids') + +module.exports = ({ libp2p }) => { + return async function addrs () { // eslint-disable-line require-await + const peers = [] + for (const [peerId, peerInfo] of libp2p.peerStore.entries()) { + peers.push({ id: new CID(peerId), addrs: peerInfo.multiaddrs.toArray() }) + } + return peers + } +} diff --git a/src/core/components/swarm/connect.js b/src/core/components/swarm/connect.js new file mode 100644 index 0000000000..98f7217f71 --- /dev/null +++ b/src/core/components/swarm/connect.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ libp2p }) => { + return function connect (addr) { + return libp2p.dial(addr) + } +} diff --git a/src/core/components/swarm/disconnect.js b/src/core/components/swarm/disconnect.js new file mode 100644 index 0000000000..3e9aadae52 --- /dev/null +++ b/src/core/components/swarm/disconnect.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ libp2p }) => { + return function disconnect (addr) { + return libp2p.hangUp(addr) + } +} diff --git a/src/core/components/swarm/local-addrs.js b/src/core/components/swarm/local-addrs.js new file mode 100644 index 0000000000..bc2ee7df71 --- /dev/null +++ b/src/core/components/swarm/local-addrs.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = ({ peerInfo }) => { + return async function localAddrs () { // eslint-disable-line require-await + return peerInfo.multiaddrs.toArray() + } +} diff --git a/src/core/components/swarm/peers.js b/src/core/components/swarm/peers.js new file mode 100644 index 0000000000..3fbc45c9c8 --- /dev/null +++ b/src/core/components/swarm/peers.js @@ -0,0 +1,34 @@ +'use strict' + +const CID = require('cids') + +module.exports = ({ libp2p }) => { + return async function peers (options) { // eslint-disable-line require-await + options = options || {} + + const verbose = options.v || options.verbose + const peers = [] + + for (const [peerId, connections] of 
libp2p.connections) { + for (const connection of connections) { + const tupple = { + addr: connection.remoteAddr, + peer: new CID(peerId) + } + + if (verbose || options.direction) { + tupple.direction = connection.stat.direction + } + + if (verbose) { + tupple.muxer = connection.stat.multiplexer + tupple.latency = 'n/a' + } + + peers.push(tupple) + } + } + + return peers + } +}