From e44316044fd993cf2b1a08259d5220f58ac70fac Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 15 Feb 2021 11:19:08 +0000 Subject: [PATCH] chore: upgrade all deps to typed versions - Uses default aegir ts config - Fixes all ts errors - Fully types core-api in ipfs-core-types package - Makes ipfs-core implement types from ipfs-core-types package - Removes duplicate types, ipfs-core-types as single source of type truth - Reduces use of external APIs by internal components in ipfs-core --- .gitignore | 1 + examples/browser-browserify/src/index.js | 2 +- examples/browser-ipns-publish/package.json | 2 +- examples/custom-ipfs-repo/package.json | 2 +- examples/traverse-ipld-graphs/package.json | 2 +- packages/interface-ipfs-core/package.json | 18 +- packages/interface-ipfs-core/src/cat.js | 2 +- packages/interface-ipfs-core/src/dag/get.js | 6 +- packages/interface-ipfs-core/src/files/cp.js | 1 + .../interface-ipfs-core/src/files/touch.js | 2 +- packages/interface-ipfs-core/src/get.js | 2 +- .../interface-ipfs-core/src/object/get.js | 4 +- .../interface-ipfs-core/src/refs-local.js | 2 +- packages/interface-ipfs-core/src/refs.js | 2 +- .../src/utils/is-shard-at-path.js | 2 +- packages/ipfs-cli/package.json | 8 +- packages/ipfs-cli/tsconfig.json | 5 +- packages/ipfs-client/tsconfig.json | 13 +- packages/ipfs-core-types/package.json | 3 +- .../src/{files.ts => basic.d.ts} | 142 ++++--- packages/ipfs-core-types/src/basic.ts | 39 -- packages/ipfs-core-types/src/bitswap.ts | 99 ----- .../ipfs-core-types/src/bitswap/index.d.ts | 23 ++ .../src/bitswap/moving-avarage.ts | 9 - packages/ipfs-core-types/src/block-service.ts | 25 -- packages/ipfs-core-types/src/block-store.ts | 10 - packages/ipfs-core-types/src/block/index.d.ts | 75 ++++ .../ipfs-core-types/src/bootstrap/index.ts | 10 + .../ipfs-core-types/src/config/index.d.ts | 120 ++++++ .../src/config/profiles/index.d.ts | 28 ++ packages/ipfs-core-types/src/dag/index.d.ts | 100 +++++ packages/ipfs-core-types/src/datastore.ts | 
184 --------- packages/ipfs-core-types/src/dht/index.d.ts | 44 +++ packages/ipfs-core-types/src/files/index.d.ts | 329 ++++++++++++++++ packages/ipfs-core-types/src/index.d.ts | 49 +++ packages/ipfs-core-types/src/index.ts | 10 - packages/ipfs-core-types/src/ipld.ts | 42 -- packages/ipfs-core-types/src/ipld/format.ts | 42 -- packages/ipfs-core-types/src/key/index.d.ts | 28 ++ packages/ipfs-core-types/src/name/index.d.ts | 94 +++++ .../src/name/pubsub/index.d.ts | 16 + .../ipfs-core-types/src/object/index.d.ts | 34 ++ .../src/object/patch/index.d.ts | 10 + packages/ipfs-core-types/src/pin/index.d.ts | 92 +++++ .../src/pin/{remote.ts => remote/index.d.ts} | 4 +- .../remote/{service.ts => service/index.d.ts} | 2 +- packages/ipfs-core-types/src/preload.ts | 3 - .../ipfs-core-types/src/pubsub/index.d.ts | 18 + packages/ipfs-core-types/src/refs/index.d.ts | 24 ++ packages/ipfs-core-types/src/repo.ts | 102 ----- packages/ipfs-core-types/src/repo/index.d.ts | 35 ++ .../src/{root.ts => root.d.ts} | 112 +++++- packages/ipfs-core-types/src/stats/index.d.ts | 26 ++ packages/ipfs-core-types/src/store.ts | 124 ------ packages/ipfs-core-types/src/swarm/index.d.ts | 41 ++ packages/ipfs-core-types/tsconfig.json | 5 +- packages/ipfs-core-utils/package.json | 3 +- .../ipfs-core-utils/src/files/format-mode.js | 9 +- .../ipfs-core-utils/src/files/format-mtime.js | 4 +- .../files/normalise-input/index.browser.js | 10 +- .../src/files/normalise-input/index.js | 7 +- .../normalise-content.browser.js | 4 +- .../normalise-input/normalise-content.js | 5 +- .../files/normalise-input/normalise-input.js | 27 +- .../src/files/normalise-input/utils.js | 5 +- .../src/pins/normalise-input.js | 56 +-- packages/ipfs-core-utils/src/to-url-string.js | 1 + .../src/with-timeout-option.js | 3 + packages/ipfs-core-utils/tsconfig.json | 5 +- packages/ipfs-core/package.json | 41 +- .../ipfs-core/src/components/add-all/index.js | 63 ++- packages/ipfs-core/src/components/add.js | 16 +- 
.../ipfs-core/src/components/bitswap/index.js | 14 +- .../ipfs-core/src/components/bitswap/stat.js | 43 +-- .../src/components/bitswap/unwant.js | 27 +- .../components/bitswap/wantlist-for-peer.js | 29 +- .../src/components/bitswap/wantlist.js | 21 +- .../ipfs-core/src/components/block/get.js | 25 +- .../ipfs-core/src/components/block/index.js | 19 +- .../ipfs-core/src/components/block/put.js | 65 +--- packages/ipfs-core/src/components/block/rm.js | 48 +-- .../ipfs-core/src/components/block/stat.js | 33 +- .../ipfs-core/src/components/bootstrap/add.js | 37 +- .../src/components/bootstrap/clear.js | 21 +- .../src/components/bootstrap/index.js | 9 +- .../src/components/bootstrap/list.js | 20 +- .../src/components/bootstrap/reset.js | 21 +- .../ipfs-core/src/components/bootstrap/rm.js | 23 +- .../src/components/bootstrap/utils.js | 9 - packages/ipfs-core/src/components/cat.js | 14 +- packages/ipfs-core/src/components/config.js | 363 ++---------------- packages/ipfs-core/src/components/dag/get.js | 68 +--- .../ipfs-core/src/components/dag/index.js | 65 +--- packages/ipfs-core/src/components/dag/put.js | 86 ++--- .../ipfs-core/src/components/dag/resolve.js | 47 +-- packages/ipfs-core/src/components/dag/tree.js | 55 +-- packages/ipfs-core/src/components/dht.js | 108 +----- packages/ipfs-core/src/components/dns.js | 17 +- .../ipfs-core/src/components/files/chmod.js | 130 +++++-- packages/ipfs-core/src/components/files/cp.js | 109 ++++-- .../ipfs-core/src/components/files/flush.js | 30 +- .../ipfs-core/src/components/files/index.js | 70 ++-- packages/ipfs-core/src/components/files/ls.js | 96 ++--- .../ipfs-core/src/components/files/mkdir.js | 78 ++-- packages/ipfs-core/src/components/files/mv.js | 70 ++-- .../ipfs-core/src/components/files/read.js | 48 +-- packages/ipfs-core/src/components/files/rm.js | 46 ++- .../ipfs-core/src/components/files/stat.js | 157 ++++---- .../ipfs-core/src/components/files/touch.js | 60 ++- .../src/components/files/utils/add-link.js | 150 
++++++-- .../src/components/files/utils/create-lock.js | 13 +- .../src/components/files/utils/create-node.js | 19 +- .../src/components/files/utils/hamt-utils.js | 108 +++++- .../src/components/files/utils/remove-link.js | 132 +++++-- .../files/utils/to-async-iterator.js | 8 + .../src/components/files/utils/to-mfs-path.js | 214 +++++++---- .../files/utils/to-path-components.js | 3 + .../files/utils/to-sources-and-destination.js | 9 + .../src/components/files/utils/to-sources.js | 17 +- .../src/components/files/utils/to-trail.js | 33 +- .../src/components/files/utils/types.ts | 12 - .../components/files/utils/update-mfs-root.js | 9 + .../src/components/files/utils/update-tree.js | 21 +- .../components/files/utils/with-mfs-root.js | 12 +- .../ipfs-core/src/components/files/write.js | 146 +++++-- packages/ipfs-core/src/components/gc-lock.js | 1 + packages/ipfs-core/src/components/get.js | 11 +- packages/ipfs-core/src/components/id.js | 16 +- packages/ipfs-core/src/components/index.js | 164 ++------ packages/ipfs-core/src/components/ipld.js | 20 +- packages/ipfs-core/src/components/ipns.js | 61 ++- .../ipfs-core/src/components/is-online.js | 2 +- .../ipfs-core/src/components/key/export.js | 19 +- packages/ipfs-core/src/components/key/gen.js | 36 +- .../ipfs-core/src/components/key/import.js | 17 +- .../ipfs-core/src/components/key/index.js | 13 +- packages/ipfs-core/src/components/key/info.js | 10 +- packages/ipfs-core/src/components/key/list.js | 19 +- .../ipfs-core/src/components/key/rename.js | 28 +- packages/ipfs-core/src/components/key/rm.js | 22 +- packages/ipfs-core/src/components/libp2p.js | 29 +- packages/ipfs-core/src/components/ls.js | 35 +- .../ipfs-core/src/components/name/index.js | 40 +- .../ipfs-core/src/components/name/publish.js | 77 +--- .../src/components/name/pubsub/cancel.js | 23 +- .../src/components/name/pubsub/index.js | 10 +- .../src/components/name/pubsub/state.js | 20 +- .../src/components/name/pubsub/subs.js | 21 +- 
.../src/components/name/pubsub/utils.js | 18 +- .../ipfs-core/src/components/name/resolve.js | 40 +- .../ipfs-core/src/components/name/utils.js | 16 +- packages/ipfs-core/src/components/network.js | 47 +-- .../ipfs-core/src/components/object/data.js | 14 +- .../ipfs-core/src/components/object/get.js | 51 +-- .../ipfs-core/src/components/object/index.js | 22 +- .../ipfs-core/src/components/object/links.js | 15 +- .../ipfs-core/src/components/object/new.js | 32 +- .../src/components/object/patch/add-link.js | 7 +- .../components/object/patch/append-data.js | 8 +- .../src/components/object/patch/index.js | 16 +- .../src/components/object/patch/rm-link.js | 8 +- .../src/components/object/patch/set-data.js | 7 +- .../ipfs-core/src/components/object/put.js | 50 +-- .../ipfs-core/src/components/object/stat.js | 23 +- .../ipfs-core/src/components/pin/add-all.js | 59 +-- packages/ipfs-core/src/components/pin/add.js | 33 +- .../ipfs-core/src/components/pin/index.js | 39 +- packages/ipfs-core/src/components/pin/ls.js | 70 +--- .../src/components/pin/pin-manager.js | 116 +++--- .../ipfs-core/src/components/pin/rm-all.js | 31 +- packages/ipfs-core/src/components/pin/rm.js | 33 +- packages/ipfs-core/src/components/ping.js | 20 +- packages/ipfs-core/src/components/pubsub.js | 98 +---- .../ipfs-core/src/components/refs/index.js | 135 ++++--- .../ipfs-core/src/components/refs/local.js | 5 +- packages/ipfs-core/src/components/repo/gc.js | 67 ++-- .../ipfs-core/src/components/repo/index.js | 16 +- .../ipfs-core/src/components/repo/stat.js | 8 +- .../ipfs-core/src/components/repo/version.js | 10 +- packages/ipfs-core/src/components/resolve.js | 57 +-- packages/ipfs-core/src/components/root.js | 2 - packages/ipfs-core/src/components/start.js | 23 +- packages/ipfs-core/src/components/stats/bw.js | 47 ++- .../ipfs-core/src/components/stats/index.js | 13 +- packages/ipfs-core/src/components/stop.js | 15 +- packages/ipfs-core/src/components/storage.js | 170 +++----- 
.../ipfs-core/src/components/swarm/addrs.js | 16 +- .../ipfs-core/src/components/swarm/connect.js | 8 +- .../src/components/swarm/disconnect.js | 8 +- .../ipfs-core/src/components/swarm/index.js | 8 +- .../src/components/swarm/local-addrs.js | 11 +- .../ipfs-core/src/components/swarm/peers.js | 43 +-- packages/ipfs-core/src/components/version.js | 30 +- packages/ipfs-core/src/ipns/index.js | 45 ++- packages/ipfs-core/src/ipns/publisher.js | 66 +++- packages/ipfs-core/src/ipns/republisher.js | 26 +- packages/ipfs-core/src/ipns/resolver.js | 31 +- packages/ipfs-core/src/ipns/routing/config.js | 7 + .../src/ipns/routing/offline-datastore.js | 12 +- .../src/ipns/routing/pubsub-datastore.js | 34 +- packages/ipfs-core/src/ipns/routing/utils.js | 13 +- packages/ipfs-core/src/mfs-preload.js | 22 +- packages/ipfs-core/src/preload.js | 20 +- packages/ipfs-core/src/runtime/dns-browser.js | 13 + packages/ipfs-core/src/runtime/dns-nodejs.js | 15 +- .../src/runtime/init-assets-nodejs.js | 13 +- packages/ipfs-core/src/runtime/ipld.js | 23 +- .../ipfs-core/src/runtime/libp2p-browser.js | 3 + .../ipfs-core/src/runtime/libp2p-nodejs.js | 4 + .../runtime/libp2p-pubsub-routers-nodejs.js | 1 + .../ipfs-core/src/runtime/preload-browser.js | 4 + .../ipfs-core/src/runtime/preload-nodejs.js | 4 + .../ipfs-core/src/runtime/repo-browser.js | 8 +- packages/ipfs-core/src/runtime/repo-nodejs.js | 30 +- packages/ipfs-core/src/types.d.ts | 229 +++++++++++ packages/ipfs-core/src/utils.js | 40 +- packages/ipfs-core/src/utils/service.js | 57 +-- packages/ipfs-core/tsconfig.json | 5 +- packages/ipfs-daemon/tsconfig.json | 5 +- packages/ipfs-grpc-client/package.json | 5 +- packages/ipfs-grpc-client/tsconfig.json | 5 +- packages/ipfs-grpc-server/tsconfig.json | 5 +- packages/ipfs-http-client/package.json | 11 +- packages/ipfs-http-client/src/object/put.js | 3 +- packages/ipfs-http-client/tsconfig.json | 9 +- packages/ipfs-http-gateway/package.json | 2 +- packages/ipfs-http-gateway/tsconfig.json | 9 +- 
packages/ipfs-http-server/package.json | 8 +- .../ipfs-http-server/test/inject/object.js | 6 +- packages/ipfs-http-server/tsconfig.json | 5 +- .../ipfs-message-port-client/package.json | 1 - .../ipfs-message-port-client/tsconfig.json | 5 +- .../ipfs-message-port-protocol/package.json | 1 + .../ipfs-message-port-protocol/tsconfig.json | 5 +- .../ipfs-message-port-server/package.json | 2 +- .../ipfs-message-port-server/src/block.js | 12 +- packages/ipfs-message-port-server/src/core.js | 42 +- packages/ipfs-message-port-server/src/dag.js | 15 +- .../ipfs-message-port-server/src/files.js | 10 +- packages/ipfs-message-port-server/src/ipfs.ts | 30 +- .../ipfs-message-port-server/src/service.js | 3 +- .../ipfs-message-port-server/tsconfig.json | 5 +- packages/ipfs/package.json | 3 +- packages/ipfs/tsconfig.json | 9 +- tsconfig.json | 84 ---- 245 files changed, 4362 insertions(+), 4497 deletions(-) rename packages/ipfs-core-types/src/{files.ts => basic.d.ts} (52%) delete mode 100644 packages/ipfs-core-types/src/basic.ts delete mode 100644 packages/ipfs-core-types/src/bitswap.ts create mode 100644 packages/ipfs-core-types/src/bitswap/index.d.ts delete mode 100644 packages/ipfs-core-types/src/bitswap/moving-avarage.ts delete mode 100644 packages/ipfs-core-types/src/block-service.ts delete mode 100644 packages/ipfs-core-types/src/block-store.ts create mode 100644 packages/ipfs-core-types/src/block/index.d.ts create mode 100644 packages/ipfs-core-types/src/bootstrap/index.ts create mode 100644 packages/ipfs-core-types/src/config/index.d.ts create mode 100644 packages/ipfs-core-types/src/config/profiles/index.d.ts create mode 100644 packages/ipfs-core-types/src/dag/index.d.ts delete mode 100644 packages/ipfs-core-types/src/datastore.ts create mode 100644 packages/ipfs-core-types/src/dht/index.d.ts create mode 100644 packages/ipfs-core-types/src/files/index.d.ts create mode 100644 packages/ipfs-core-types/src/index.d.ts delete mode 100644 packages/ipfs-core-types/src/index.ts 
delete mode 100644 packages/ipfs-core-types/src/ipld.ts delete mode 100644 packages/ipfs-core-types/src/ipld/format.ts create mode 100644 packages/ipfs-core-types/src/key/index.d.ts create mode 100644 packages/ipfs-core-types/src/name/index.d.ts create mode 100644 packages/ipfs-core-types/src/name/pubsub/index.d.ts create mode 100644 packages/ipfs-core-types/src/object/index.d.ts create mode 100644 packages/ipfs-core-types/src/object/patch/index.d.ts create mode 100644 packages/ipfs-core-types/src/pin/index.d.ts rename packages/ipfs-core-types/src/pin/{remote.ts => remote/index.d.ts} (95%) rename packages/ipfs-core-types/src/pin/remote/{service.ts => service/index.d.ts} (96%) delete mode 100644 packages/ipfs-core-types/src/preload.ts create mode 100644 packages/ipfs-core-types/src/pubsub/index.d.ts create mode 100644 packages/ipfs-core-types/src/refs/index.d.ts delete mode 100644 packages/ipfs-core-types/src/repo.ts create mode 100644 packages/ipfs-core-types/src/repo/index.d.ts rename packages/ipfs-core-types/src/{root.ts => root.d.ts} (50%) create mode 100644 packages/ipfs-core-types/src/stats/index.d.ts delete mode 100644 packages/ipfs-core-types/src/store.ts create mode 100644 packages/ipfs-core-types/src/swarm/index.d.ts delete mode 100644 packages/ipfs-core/src/components/files/utils/types.ts create mode 100644 packages/ipfs-core/src/types.d.ts delete mode 100644 tsconfig.json diff --git a/.gitignore b/.gitignore index 0f7d8b73b7..210fed8960 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,7 @@ node_modules dist build bundle.js +tsconfig-types.aegir.json # Deployment files .npmrc diff --git a/examples/browser-browserify/src/index.js b/examples/browser-browserify/src/index.js index 96836f860c..c99eb77391 100644 --- a/examples/browser-browserify/src/index.js +++ b/examples/browser-browserify/src/index.js @@ -25,7 +25,7 @@ document.addEventListener('DOMContentLoaded', async () => { async function display (cid) { for await (const data of node.cat(cid)) { 
document.getElementById('cid').innerText = cid - document.getElementById('content').innerText = data + document.getElementById('content').innerText = new TextDecoder().decode(data) document.getElementById('output').setAttribute('style', 'display: block') } } diff --git a/examples/browser-ipns-publish/package.json b/examples/browser-ipns-publish/package.json index 583c291f86..9a0b4f43ff 100644 --- a/examples/browser-ipns-publish/package.json +++ b/examples/browser-ipns-publish/package.json @@ -16,7 +16,7 @@ "ipfs": "^0.54.4", "ipfs-http-client": "^49.0.4", "ipfs-utils": "^6.0.1", - "ipns": "^0.8.0", + "ipns": "^0.10.0", "it-last": "^1.0.4", "p-retry": "^4.2.0", "uint8arrays": "^2.1.3" diff --git a/examples/custom-ipfs-repo/package.json b/examples/custom-ipfs-repo/package.json index 27bb5ded62..ad4dbb7251 100644 --- a/examples/custom-ipfs-repo/package.json +++ b/examples/custom-ipfs-repo/package.json @@ -12,7 +12,7 @@ "dependencies": { "datastore-fs": "^2.0.0", "ipfs": "^0.54.4", - "ipfs-repo": "^8.0.0", + "ipfs-repo": "^9.0.0", "it-all": "^1.0.4" }, "devDependencies": { diff --git a/examples/traverse-ipld-graphs/package.json b/examples/traverse-ipld-graphs/package.json index a6998d74a3..0eeaac3e1f 100644 --- a/examples/traverse-ipld-graphs/package.json +++ b/examples/traverse-ipld-graphs/package.json @@ -16,7 +16,7 @@ "cids": "^1.1.5", "ipfs": "^0.54.4", "ipld-block": "^0.11.0", - "ipld-dag-pb": "^0.20.0", + "ipld-dag-pb": "^0.22.0", "ipld-git": "^0.6.1", "ipld-ethereum": "^5.0.1", "multihashing-async": "^2.1.2" diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json index c822d27599..7155b685ab 100644 --- a/packages/interface-ipfs-core/package.json +++ b/packages/interface-ipfs-core/package.json @@ -43,20 +43,20 @@ "chai-as-promised": "^7.1.1", "chai-subset": "^1.6.0", "cids": "^1.1.5", - "delay": "^4.4.0", + "delay": "^5.0.0", "dirty-chai": "^2.0.1", - "err-code": "^2.0.3", - "ipfs-unixfs": "^2.0.3", - 
"ipfs-unixfs-importer": "^5.0.0", + "err-code": "^3.0.1", + "ipfs-unixfs": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs?fix/declare-interface-types-in-d-ts", + "ipfs-unixfs-importer": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs-importer?fix/declare-interface-types-in-d-ts", "ipfs-utils": "^6.0.1", "ipld-block": "^0.11.0", - "ipld-dag-cbor": "^0.17.0", - "ipld-dag-pb": "^0.20.0", - "ipns": "^0.8.0", - "is-ipfs": "^2.0.0", + "ipld-dag-cbor": "^0.18.0", + "ipld-dag-pb": "^0.22.0", + "ipns": "^0.10.0", + "is-ipfs": "^4.0.0", "iso-random-stream": "^1.1.1", "it-all": "^1.0.4", - "it-buffer-stream": "^1.0.5", + "it-buffer-stream": "^2.0.0", "it-concat": "^1.0.1", "it-drain": "^1.0.3", "it-first": "^1.0.4", diff --git a/packages/interface-ipfs-core/src/cat.js b/packages/interface-ipfs-core/src/cat.js index 779f22c309..b44759e776 100644 --- a/packages/interface-ipfs-core/src/cat.js +++ b/packages/interface-ipfs-core/src/cat.js @@ -10,7 +10,7 @@ const all = require('it-all') const drain = require('it-drain') const { getDescribe, getIt, expect } = require('./utils/mocha') const testTimeout = require('./utils/test-timeout') -const importer = require('ipfs-unixfs-importer') +const { importer } = require('ipfs-unixfs-importer') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** diff --git a/packages/interface-ipfs-core/src/dag/get.js b/packages/interface-ipfs-core/src/dag/get.js index fee4bf2458..4bff6a9528 100644 --- a/packages/interface-ipfs-core/src/dag/get.js +++ b/packages/interface-ipfs-core/src/dag/get.js @@ -5,8 +5,8 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') -const importer = require('ipfs-unixfs-importer') -const Unixfs = require('ipfs-unixfs') +const { importer } = require('ipfs-unixfs-importer') +const { UnixFS } = require('ipfs-unixfs') const all = require('it-all') const CID = 
require('cids') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -193,7 +193,7 @@ module.exports = (common, options) => { const cidv0 = cidv1.toV0() const output = await ipfs.dag.get(cidv0) - expect(Unixfs.unmarshal(output.value.Data).data).to.eql(input) + expect(UnixFS.unmarshal(output.value.Data).data).to.eql(input) }) it('should be able to get part of a dag-cbor node', async () => { diff --git a/packages/interface-ipfs-core/src/files/cp.js b/packages/interface-ipfs-core/src/files/cp.js index 7e9880ee37..15ad773aa4 100644 --- a/packages/interface-ipfs-core/src/files/cp.js +++ b/packages/interface-ipfs-core/src/files/cp.js @@ -80,6 +80,7 @@ module.exports = (common, options) => { await ipfs.files.write(src1, [], { create: true }) + await expect(ipfs.files.cp(src1, `${parent}/child`)).to.eventually.be.rejectedWith(Error) .that.has.property('message').that.matches(/"identity"/) }) diff --git a/packages/interface-ipfs-core/src/files/touch.js b/packages/interface-ipfs-core/src/files/touch.js index 1149be459c..4dcd99e106 100644 --- a/packages/interface-ipfs-core/src/files/touch.js +++ b/packages/interface-ipfs-core/src/files/touch.js @@ -33,7 +33,7 @@ module.exports = (common, options) => { }) const stat2 = await ipfs.files.stat(testPath) - expect(stat2).to.have.nested.deep.property('mtime', expectedMtime) + expect(stat2).to.have.deep.nested.property('mtime', expectedMtime) } before(async () => { ipfs = (await common.spawn()).api }) diff --git a/packages/interface-ipfs-core/src/get.js b/packages/interface-ipfs-core/src/get.js index 2d05173796..4f345954d1 100644 --- a/packages/interface-ipfs-core/src/get.js +++ b/packages/interface-ipfs-core/src/get.js @@ -12,7 +12,7 @@ const last = require('it-last') const map = require('it-map') const { getDescribe, getIt, expect } = require('./utils/mocha') const testTimeout = require('./utils/test-timeout') -const importer = require('ipfs-unixfs-importer') +const { importer } = require('ipfs-unixfs-importer') /** 
@typedef { import("ipfsd-ctl/src/factory") } Factory */ /** diff --git a/packages/interface-ipfs-core/src/object/get.js b/packages/interface-ipfs-core/src/object/get.js index 97b4472c3c..b81ddb6a82 100644 --- a/packages/interface-ipfs-core/src/object/get.js +++ b/packages/interface-ipfs-core/src/object/get.js @@ -5,7 +5,7 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const { nanoid } = require('nanoid') const { getDescribe, getIt, expect } = require('../utils/mocha') -const UnixFs = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const randomBytes = require('iso-random-stream/src/random') const { asDAGLink } = require('./utils') const testTimeout = require('../utils/test-timeout') @@ -147,7 +147,7 @@ module.exports = (common, options) => { }) const node = await ipfs.object.get(result.cid) - const meta = UnixFs.unmarshal(node.Data) + const meta = UnixFS.unmarshal(node.Data) expect(meta.fileSize()).to.equal(data.length) }) diff --git a/packages/interface-ipfs-core/src/refs-local.js b/packages/interface-ipfs-core/src/refs-local.js index 9a46f0266d..6824fbbdf6 100644 --- a/packages/interface-ipfs-core/src/refs-local.js +++ b/packages/interface-ipfs-core/src/refs-local.js @@ -4,7 +4,7 @@ const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('./utils/mocha') const all = require('it-all') -const importer = require('ipfs-unixfs-importer') +const { importer } = require('ipfs-unixfs-importer') const drain = require('it-drain') const testTimeout = require('./utils/test-timeout') const CID = require('cids') diff --git a/packages/interface-ipfs-core/src/refs.js b/packages/interface-ipfs-core/src/refs.js index 757c395b0b..422cda18a5 100644 --- a/packages/interface-ipfs-core/src/refs.js +++ b/packages/interface-ipfs-core/src/refs.js @@ -12,7 +12,7 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const DAGLink = dagPB.DAGLink -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = 
require('ipfs-unixfs') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** diff --git a/packages/interface-ipfs-core/src/utils/is-shard-at-path.js b/packages/interface-ipfs-core/src/utils/is-shard-at-path.js index f2be8ee480..a0a48c63c7 100644 --- a/packages/interface-ipfs-core/src/utils/is-shard-at-path.js +++ b/packages/interface-ipfs-core/src/utils/is-shard-at-path.js @@ -1,6 +1,6 @@ 'use strict' -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') module.exports = async (path, ipfs) => { const stats = await ipfs.files.stat(path) diff --git a/packages/ipfs-cli/package.json b/packages/ipfs-cli/package.json index f4586ca0a9..60bc6e8129 100644 --- a/packages/ipfs-cli/package.json +++ b/packages/ipfs-cli/package.json @@ -32,17 +32,17 @@ "cid-tool": "^1.0.0", "cids": "^1.1.5", "debug": "^4.1.1", - "err-code": "^2.0.3", + "err-code": "^3.0.1", "execa": "^5.0.0", "get-folder-size": "^2.0.1", "ipfs-core": "^0.5.4", "ipfs-core-utils": "^0.7.2", "ipfs-daemon": "^0.5.4", "ipfs-http-client": "^49.0.4", - "ipfs-repo": "^8.0.0", + "ipfs-repo": "^9.0.0", "ipfs-utils": "^6.0.1", - "ipld-dag-cbor": "^0.17.0", - "ipld-dag-pb": "^0.20.0", + "ipld-dag-cbor": "^0.18.0", + "ipld-dag-pb": "^0.22.0", "it-all": "^1.0.4", "it-concat": "^1.0.1", "it-first": "^1.0.4", diff --git a/packages/ipfs-cli/tsconfig.json b/packages/ipfs-cli/tsconfig.json index f9f47fbb7d..27f056c6cc 100644 --- a/packages/ipfs-cli/tsconfig.json +++ b/packages/ipfs-cli/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-client/tsconfig.json b/packages/ipfs-client/tsconfig.json index b45232f56a..a64e22c5b5 100644 --- a/packages/ipfs-client/tsconfig.json +++ b/packages/ipfs-client/tsconfig.json @@ -1,8 +1,13 @@ { - "extends": 
"../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, + "include": [ + "src", + "package.json" + ], "references": [ { "path": "../ipfs-http-client" @@ -10,9 +15,5 @@ { "path": "../ipfs-grpc-client" } - ], - "include": [ - "src", - "package.json" ] } diff --git a/packages/ipfs-core-types/package.json b/packages/ipfs-core-types/package.json index 65d4518953..aebf55048c 100644 --- a/packages/ipfs-core-types/package.json +++ b/packages/ipfs-core-types/package.json @@ -3,7 +3,7 @@ "version": "0.3.1", "description": "IPFS interface definitions used by implementations for API compatibility.", "leadMaintainer": "Alex Potsides ", - "types": "src/index.ts", + "types": "src/index.d.ts", "homepage": "https://github.com/ipfs/js-ipfs/tree/master/packages/interface-ipfs-core#readme", "bugs": "https://github.com/ipfs/js-ipfs/issues", "scripts": { @@ -30,6 +30,7 @@ "dependencies": { "cids": "^1.1.5", "multiaddr": "^8.0.0", + "multibase": "^4.0.2", "peer-id": "^0.14.1" }, "devDependencies": { diff --git a/packages/ipfs-core-types/src/files.ts b/packages/ipfs-core-types/src/basic.d.ts similarity index 52% rename from packages/ipfs-core-types/src/files.ts rename to packages/ipfs-core-types/src/basic.d.ts index e9f45c7d5b..ebfdebd788 100644 --- a/packages/ipfs-core-types/src/files.ts +++ b/packages/ipfs-core-types/src/basic.d.ts @@ -1,5 +1,6 @@ import CID from 'cids' import { AwaitIterable } from './basic' +import { Mtime } from 'ipfs-unixfs' export type Entry|Blob> = | FileEntry @@ -7,8 +8,8 @@ export type Entry|Blob> = export interface BaseEntry { path: string - mode?: Mode - mtime?: MTime + mode?: number + mtime?: Mtime } export interface FileEntry |Blob> extends BaseEntry { content?: Content @@ -43,7 +44,7 @@ export interface ToFileMetadata { } /** - * File content in arbitrary (supported) represenation. 
It is used in input + * File content in arbitrary (supported) representation. It is used in input * positions and is usually normalized to `Blob` in browser contexts and * `AsyncIterable` in node. */ @@ -58,7 +59,7 @@ export type ToContent = /** * Timestamp representation in arbitrary (supported) in representations. It is - * used in input positions and usurally get's normalised to `MTime` before use. + * used in input positions and usually gets normalised to `MTime` before use. */ export type ToMTime = | Date @@ -69,41 +70,6 @@ export type ToMode = | string | number -export interface File { - readonly type: 'file' - readonly cid: CID - readonly name: string - - /** - * File path - */ - readonly path: string - /** - * File content - */ - readonly content?: AsyncIterable - mode?: Mode - mtime?: MTime - size?: number - depth?: number -} - -export interface Directory { - type: 'dir' - cid: CID - name: string - /** - * Directory path - */ - path: string - mode?: Mode - mtime?: MTime - size?: number - depth?: number -} - -export type IPFSEntry = File | Directory - export interface BaseFile { cid: CID path: string @@ -114,59 +80,85 @@ export interface InputFile extends BaseFile { unixfs: undefined } -export interface UnixFSFile extends BaseFile { - content: () => AsyncIterable - unixfs: UnixFS -} - -export interface UnixFSEntry { - path: string - cid: CID - mode: Mode - mtime: MTime - size: number -} - -export interface MTime { +export interface MTimeLike { /** - * The number of seconds since(positive) or before (negative) the Unix Epoch + * The number of seconds since (positive) or before (negative) the Unix Epoch * began. 
*/ - readonly secs: number + secs: number /** * The number of nanoseconds since the last full second */ - readonly nsecs: number + nsecs?: number } -export interface MTimeLike { +/** + * Time representation as tuple of two integers, as per the output of + * [`process.hrtime()`](https://nodejs.org/dist/latest/docs/api/process.html#process_process_hrtime_time). + */ +type HRTime = [number, number] + +export interface BrowserImportCandidate { + path?: string, + content?: Blob, + mtime?: Mtime, + mode?: number +} + +/** + * Represents a value that you can await on, which is either value or a promise + * of one. + */ +export type Await = + | T + | Promise + +/** + * Represents an iterable that can be used in `for await` loops, that is either + * iterable or an async iterable. + */ +export type AwaitIterable = + | Iterable + | AsyncIterable + +/** + * Common options across all cancellable requests. + */ +export interface AbortOptions { /** - * The number of seconds since(positive) or before (negative) the Unix Epoch - * began. + * Can be provided to a function that starts a long running task, which will + * be aborted when signal is triggered. */ - secs: number - + signal?: AbortSignal /** - * The number of nanoseconds since the last full second + * Can be provided to a function that starts a long running task, which will + * be aborted after provided timeout (in ms). */ - nsecs?: number + timeout?: number } -interface UnixFS { - readonly type: 'directory' | 'file' | 'dir' - readonly mode: Mode - readonly mtime: MTime - - fileSize: () => number - content: () => AsyncIterable +export interface PreloadOptions { + preload?: boolean } +export type ToJSON = + | null + | string + | number + | boolean + | ToJSON[] + | { toJSON?: () => ToJSON } & { [key: string]: ToJSON } + /** - * Time representation as tuple of two integers, as per the output of - * [`process.hrtime()`](https://nodejs.org/dist/latest/docs/api/process.html#process_process_hrtime_time). 
+ * An IPFS path or CID */ -type HRTime = [number, number] +export type IPFSPath = CID | string + +export interface BufferStore { + put: (key: Uint8Array, value: Uint8Array) => Promise + get: (key: Uint8Array) => Promise + stores: any[] +} -// It's just a named type alias, but it better captures intent. -export type Mode = number +export type HigherOrderFn = (fn: (...args: Parameters) => ReturnType) => (...args: Parameters) => ReturnType diff --git a/packages/ipfs-core-types/src/basic.ts b/packages/ipfs-core-types/src/basic.ts deleted file mode 100644 index f492bb853a..0000000000 --- a/packages/ipfs-core-types/src/basic.ts +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Represents a value that you can await on, which is either value or a promise - * of one. - */ -export type Await = - | T - | Promise - -/** - * Represents an iterable that can be used in `for await` loops, that is either - * iterable or an async iterable. - */ -export type AwaitIterable = - | Iterable - | AsyncIterable - -/** - * Common options across all cancellable requests. - */ -export interface AbortOptions { - /** - * Can be provided to a function that starts a long running task, which will - * be aborted when signal is triggered. - */ - signal?: AbortSignal - /** - * Can be provided to a function that starts a long running task, which will - * be aborted after provided timeout (in ms). 
- */ - timeout?: number -} - -export type ToJSON = - | null - | string - | number - | boolean - | ToJSON[] - | { toJSON?: () => ToJSON } & { [key: string]: ToJSON } diff --git a/packages/ipfs-core-types/src/bitswap.ts b/packages/ipfs-core-types/src/bitswap.ts deleted file mode 100644 index a00c8de9b5..0000000000 --- a/packages/ipfs-core-types/src/bitswap.ts +++ /dev/null @@ -1,99 +0,0 @@ -import type BigInteger from 'bignumber.js' -import type PeerId from 'peer-id' -import type CID from 'cids' -import type { Block } from './block-service' -import type { AbortOptions, Await } from './basic' -import type { MovingAverage } from './bitswap/moving-avarage' -import type { StoreReader, StoreExporter, StoreImporter } from './store' - -export interface Bitswap extends - StoreReader, - StoreExporter, - StoreImporter { - - readonly peerId: PeerId - - enableStats: () => void - disableStats: () => void - - wantlistForPeer: (peerId: PeerId, options?: AbortOptions) => Map - ledgerForPeer: (peerId: PeerId) => null|LedgerForPeer - - put: (block: Block, options?: AbortOptions) => Await - - unwant: (cids: CID|CID[], options?: AbortOptions) => void - cancelWants: (cids: CID|CID[]) => void - getWantlist: (options?: AbortOptions) => Iterable<[string, WantListEntry]> - peers: () => PeerId[] - stat: () => Stats - start: () => void - stop: () => void -} - -export interface LedgerForPeer { - peer: string - value: number - sent: number - recv: number - exchanged: number -} - -export interface Ledger { - sentBytes: (n: number) => void - receivedBytes: (n: number) => void - - wants: (cid: CID, priority: number, wantType: WantType) => void - cancelWant: (cid: CID) => void - wantlistContains: (cid: CID) => WantListEntry|undefined - - debtRatio: () => number -} - -export interface WantListEntry { - readonly cid: CID - priority: number - inc: () => void - dec: () => void - hasRefs: () => boolean - equals: (other: WantListEntry) => boolean -} - -export interface WantList { - entries: Entry[] - 
full?: boolean -} - -export interface Entry { - block: Uint8Array - priority: number - cancel: boolean - wantType?: WantType - sendDontHave?: boolean -} - -export interface BlockPresence { - cid: Uint8Array - type: BlockPresenceType -} - -export type Have = 0 -export type DontHave = 1 -export type BlockPresenceType = Have | DontHave - -export type WantBlock = 0 -export type HaveBlock = 1 -export type WantType = WantBlock | HaveBlock - -export interface BlockData { - prefix: Uint8Array - data: Uint8Array -} - -export interface Stats { - enable: () => void - disable: () => void - stop: () => void - readonly snapshot: Record - readonly movingAverages: Record> - push: (peer: string|null, counter: string, inc: number) => void -} diff --git a/packages/ipfs-core-types/src/bitswap/index.d.ts b/packages/ipfs-core-types/src/bitswap/index.d.ts new file mode 100644 index 0000000000..08053c1b85 --- /dev/null +++ b/packages/ipfs-core-types/src/bitswap/index.d.ts @@ -0,0 +1,23 @@ +import type BigInteger from 'bignumber.js' +import type PeerId from 'peer-id' +import type CID from 'cids' +import type { AbortOptions } from '../basic' + +export interface API { + wantlist: (options?: AbortOptions) => Promise + wantlistForPeer: (peerId, options?: AbortOptions) => Promise + unwant: (cids: CID | CID[], options?: AbortOptions) => Promise + stat: (options?: AbortOptions) => Promise +} + +export interface Stats { + provideBufLen: number + wantlist: CID[] + peers: CID[] + blocksReceived: BigInteger + dataReceived: BigInteger + blocksSent: BigInteger + dataSent: BigInteger + dupBlksReceived: BigInteger + dupDataReceived: BigInteger +} diff --git a/packages/ipfs-core-types/src/bitswap/moving-avarage.ts b/packages/ipfs-core-types/src/bitswap/moving-avarage.ts deleted file mode 100644 index 8fe2db8431..0000000000 --- a/packages/ipfs-core-types/src/bitswap/moving-avarage.ts +++ /dev/null @@ -1,9 +0,0 @@ -export interface MovingAverage { - variance: () => number - movingAverage: () => number - - 
deviation: () => number - forecast: () => number - - push: (time: number, value: number) => void -} diff --git a/packages/ipfs-core-types/src/block-service.ts b/packages/ipfs-core-types/src/block-service.ts deleted file mode 100644 index c9e1271cc5..0000000000 --- a/packages/ipfs-core-types/src/block-service.ts +++ /dev/null @@ -1,25 +0,0 @@ -import type CID from 'cids' -import type { Await, AbortOptions } from './basic' -import type { StoreReader, StoreImporter, StoreExporter, StoreEraser } from './store' -import type { Bitswap } from './bitswap' - -export interface BlockService extends - StoreReader, - StoreExporter, - StoreImporter, - StoreEraser { - setExchange: (bitswap: Bitswap) => void - - unsetExchange: () => void - hasExchange: () => boolean - - /** - * Put a block to the underlying datastore. - */ - put: (block: Block, options?: AbortOptions) => Await -} - -export interface Block { - cid: CID - data: Uint8Array -} diff --git a/packages/ipfs-core-types/src/block-store.ts b/packages/ipfs-core-types/src/block-store.ts deleted file mode 100644 index b34f78fd74..0000000000 --- a/packages/ipfs-core-types/src/block-store.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { AbortOptions, Await, AwaitIterable } from './basic' -import type { Block } from './block-service' -import type CID from 'cids' - -export interface BlockStore { - has: (cid: CID, options?: AbortOptions) => Await - get: (cid: CID, options?: AbortOptions) => Await - put: (block: Block, options?: AbortOptions) => Await - putMany: (blocks: AsyncIterable|Iterable, options?: AbortOptions) => AwaitIterable -} diff --git a/packages/ipfs-core-types/src/block/index.d.ts b/packages/ipfs-core-types/src/block/index.d.ts new file mode 100644 index 0000000000..6eb178a137 --- /dev/null +++ b/packages/ipfs-core-types/src/block/index.d.ts @@ -0,0 +1,75 @@ +import { AbortOptions, PreloadOptions } from '../basic' +import CID, { CIDVersion } from 'cids' +import Block from 'ipld-block' +import { CodecName } from 
'multicodec' +import { HashName } from 'multihashes' + +export interface API { + get: (cid: CID | string | Uint8Array, options?: AbortOptions & PreloadOptions) => Promise + put: (block: Block | Uint8Array, options?: PutOptions) => Promise + rm: (cid: CID | CID[], options?: RmOptions) => AsyncIterable + stat: (ipfsPath: IPFSPath, options?: AbortOptions & PreloadOptions) => Promise +} + +export interface PutOptions extends AbortOptions, PreloadOptions { + /** + * CID to store the block under - ignored if a Block is passed + */ + cid?: CID + + /** + * The codec to use to create the CID + */ + format?: CodecName + + /** + * Multihash hashing algorithm to use. (Defaults to 'sha2-256') + */ + mhtype?: HashName + + /** + * The version to use to create the CID + */ + version?: CIDVersion + + /** + * Pin this block when adding. (Defaults to `false`) + */ + pin?: boolean +} + +export interface RmOptions extends AbortOptions { + /** + * Ignores nonexistent blocks + */ + force?: boolean, + + /** + * Do not return output if true + */ + quiet?: boolean +} + +export interface RmResult extends AbortOptions { + /** + * The CID of the removed block + */ + cid: CID + + /** + * Any error that occured while trying to remove the block + */ + error?: Error +} + +export interface StatResult { + /** + * The CID of the block + */ + cid: CID + + /** + * The size of the block + */ + size: number +} diff --git a/packages/ipfs-core-types/src/bootstrap/index.ts b/packages/ipfs-core-types/src/bootstrap/index.ts new file mode 100644 index 0000000000..e97a72302a --- /dev/null +++ b/packages/ipfs-core-types/src/bootstrap/index.ts @@ -0,0 +1,10 @@ +import type { AbortOptions } from '../basic' +import type Multiaddr from 'multiaddr' + +export interface API { + add: (addr: Multiaddr, options?: AbortOptions) => Promise<{ Peers: Multiaddr[] }> + reset: (options?: AbortOptions) => Promise<{ Peers: Multiaddr[] }> + list: (options?: AbortOptions) => Promise<{ Peers: Multiaddr[] }> + rm: (addr: Multiaddr, 
options?: AbortOptions) => Promise<{ Peers: Multiaddr[] }> + clear: (options?: AbortOptions) => Promise<{ Peers: Multiaddr[] }> +} diff --git a/packages/ipfs-core-types/src/config/index.d.ts b/packages/ipfs-core-types/src/config/index.d.ts new file mode 100644 index 0000000000..e622475901 --- /dev/null +++ b/packages/ipfs-core-types/src/config/index.d.ts @@ -0,0 +1,120 @@ +import type { AbortOptions } from '../basic' +import { API as ProfilesAPI} from './profiles' + +export interface API { + /** + * Returns a value from the currently being used config. If the daemon + * is off, it returns the value from the stored config. + */ + get: (key: string, options?: AbortOptions) => Promise + + /** + * Returns the full config been used. If the daemon is off, it returns the + * stored config + */ + getAll: (options?: AbortOptions) => Promise + + /** + * Adds or replaces a config value. Note that restarting the node will be + * necessary for any change to take effect. + */ + set: (key: string, value: any, options?: AbortOptions) => Promise + + /** + * Replaces the full config. Note that restarting the node will be + * necessary for any change to take effect. 
+ */ + replace: (config: Config, options?: AbortOptions) => Promise + + profiles: ProfilesAPI +} + +export interface Config { + Addresses: AddressConfig + Profiles?: string + Bootstrap: string[] + Discovery: DiscoveryConfig + Datastore?: DatastoreConfig + Identity?: IdentityConfig + Keychain?: KeychainConfig + Pubsub?: PubsubConfig + Swarm?: SwarmConfig + Routing?: RoutingConfig +} + +/** + * Contains information about various listener addresses to be used by this node + */ +export interface AddressConfig { + API?: string + RPC?: string + Delegates?: string[] + Gateway?: string + Swarm: string[] +} + +export interface DiscoveryConfig { + MDNS: MDNSDiscovery + webRTCStar: WebRTCStarDiscovery +} + +export interface MDNSDiscovery { + Enabled?: boolean + Interval?: number +} + +export interface WebRTCStarDiscovery { + Enabled?: boolean +} + +export interface DatastoreConfig { + Spec?: DatastoreSpec +} + +export interface DatastoreSpec { + +} + +export interface IdentityConfig { + /** + * The unique PKI identity label for this configs peer. Set on init and never + * read, its merely here for convenience. IPFS will always generate the peerID + * from its keypair at runtime. + */ + PeerID: string + + /** + * The base64 encoded protobuf describing (and containing) the nodes private key. 
+ */ + PrivateKey: string +} + +export interface KeychainConfig { + DEK?: DEK +} + +export interface DEK { + keyLength?: number + iterationCount?: number + salt?: string + hash?: string +} + +export interface PubsubConfig { + PubSubRouter?: 'gossipsub' | 'floodsub' + Enabled?: boolean +} + +export interface SwarmConfig { + ConnMgr?: ConnMgrConfig + DisableNatPortMap?: boolean +} + +export interface ConnMgrConfig { + LowWater?: number + HighWater?: number +} + +export interface RoutingConfig { + Type?: string +} diff --git a/packages/ipfs-core-types/src/config/profiles/index.d.ts b/packages/ipfs-core-types/src/config/profiles/index.d.ts new file mode 100644 index 0000000000..c23b2a5ba3 --- /dev/null +++ b/packages/ipfs-core-types/src/config/profiles/index.d.ts @@ -0,0 +1,28 @@ +import type { AbortOptions } from '../../basic' + +export interface API { + /** + * List available config profiles + */ + list: (options?: AbortOptions) => Promise + + /** + * Apply a profile to the current config. Note that restarting the node + * will be necessary for any change to take effect.
+ */ + apply: (name: string, options?: ProfilesApplyOptions) => Promise +} + +export interface Profile { + name: string + description: string +} + +export interface ProfilesApplyOptions extends AbortOptions { + dryRun?: boolean +} + +export interface ProfilesApplyResult { + original: object + updated: object +} diff --git a/packages/ipfs-core-types/src/dag/index.d.ts b/packages/ipfs-core-types/src/dag/index.d.ts new file mode 100644 index 0000000000..722753b12f --- /dev/null +++ b/packages/ipfs-core-types/src/dag/index.d.ts @@ -0,0 +1,100 @@ +import { AbortOptions, PreloadOptions, IPFSPath } from '../basic' +import CID from 'cids' +import { CodecName } from 'multicodec' +import { HashName } from 'multihashes' + +export interface API { + get: (cid: CID, options?: GetOptions) => Promise + put: (node: any, options?: PutOptions) => Promise + tree: (cid: CID, options?: TreeOptions) => Promise + resolve: (ipfsPath: IPFSPath, options?: ResolveOptions) => Promise +} + +export interface GetOptions extends AbortOptions, PreloadOptions { + /** + * An optional path within the DAG to resolve + */ + path?: string + + /** + * If set to true, it will avoid resolving through different objects + */ + localResolve?: boolean +} + +export interface GetResult { + /** + * The value or node that was fetched during the get operation + */ + value: any + + /** + * The remainder of the Path that the node was unable to resolve or what was left in a localResolve scenario + */ + remainderPath?: string +} + +export interface PutOptions extends AbortOptions, PreloadOptions { + /** + * CID to store the value with + */ + cid?: CID + + /** + * The codec to use to create the CID (ignored if `cid` is passed) + */ + format?: CodecName + + /** + * Multihash hashing algorithm to use (ignored if `cid` is passed) + */ + hashAlg?: HashName + + /** + * The version to use to create the CID (ignored if `cid` is passed) + */ + version?: CIDVersion + + /** + * Pin this block when adding. 
(Defaults to `false`) + */ + pin?: boolean +} + +export interface RmOptions extends AbortOptions { + /** + * Ignores non-existent blocks + */ + force?: boolean +} + +export interface TreeOptions extends AbortOptions, PreloadOptions { + /** + * An optional path within the DAG to resolve + */ + path?: string + + /** + * If set to true, it will follow the links and continuously run tree on them, returning all the paths in the graph + */ + recursive?: boolean +} + +export interface ResolveOptions extends AbortOptions, PreloadOptions { + /** + * If ipfsPath is a CID, you may pass a path here + */ + path?: string +} + +export interface ResolveResult { + /** + * The last CID encountered during the traversal and the path to the end of the IPFS path inside the node referenced by the CID + */ + cid: CID + + /** + * The remainder of the Path that the node was unable to resolve + */ + remainderPath?: string +} \ No newline at end of file diff --git a/packages/ipfs-core-types/src/datastore.ts b/packages/ipfs-core-types/src/datastore.ts deleted file mode 100644 index 51e69384bc..0000000000 --- a/packages/ipfs-core-types/src/datastore.ts +++ /dev/null @@ -1,184 +0,0 @@ -import type { KeyValueStore, StoreBatch, StoreSelector, Resource } from './store' -export interface DataStore extends - KeyValueStore, - StoreSelector, - StoreBatch, - Resource { -} - -export interface Key { - /** - * Returns the "name" of this key (field of last namespace). - * - * @example - * ```js - * key.toString() - * // '/Comedy/MontyPython/Actor:JohnCleese' - * key.name() - * // 'JohnCleese' - * ``` - */ - name: () => string - - /** - * Returns the "type" of this key (value of last namespace). - * - * @example - * ```js - * key.toString() - * '/Comedy/MontyPython/Actor:JohnCleese' - * key.type() - * // 'Actor' - * ``` - */ - type: () => string - - /** - * Returns the `namespaces` making up this `Key`. - */ - namespaces: () => string[] - - /** - * Returns the "base" namespace of this key. 
- * - * @example - * ```js - * key.toString() - * // '/Comedy/MontyPython/Actor:JohnCleese' - * key.baseNamespace() - * // 'Actor:JohnCleese' - */ - baseNamespace: () => string - - /** - * Returns an "instance" of this type key (appends value to namespace). - * - * @example - * ```js - * key.toString() - * // '/Comedy/MontyPython/Actor' - * key.instance('JohnClesse').toString() - * // '/Comedy/MontyPython/Actor:JohnCleese' - * ``` - */ - instance: () => Key - - /** - * Returns the "path" of this key (parent + type). - * - * @example - * ```js - * key.toString() - * '/Comedy/MontyPython/Actor:JohnCleese' - * key.path().toString() - * // '/Comedy/MontyPython/Actor' - * ``` - */ - path: () => Key - - /** - * Returns the `parent` Key of this Key. - * - * @example - * ```js - * key.toString() - * "/Comedy/MontyPython/Actor:JohnCleese" - * key.parent().toString() - * // "/Comedy/MontyPython" - * ``` - */ - parent: () => Key - - /** - * Returns the `child` Key of this Key. - * - * @example - * ```js - * key.toString() - * '/Comedy/MontyPython' - * child.toString() - * // 'Actor:JohnCleese' - * key.child(child).toString() - * '/Comedy/MontyPython/Actor:JohnCleese' - * ``` - */ - child: (key: Key) => Key - - /** - * Check if the given key is sorted lower than this. - */ - less: (key: Key) => boolean - - /** - * Returns whether this key is a prefix of `other` - * - * @example - * ```js - * comedy.toString() - * '/Comedy' - * monty.toString() - * '/Comedy/MontyPython' - * comedy.isAncestorOf(monty) - * // true - * ``` - */ - isAncestorOf: (other: Key) => boolean - - /** - * Returns whether this key is a contains `other` as prefix. - * ```js - * comedy.toString() - * '/Comedy' - * monty.toString() - * '/Comedy/MontyPython' - * monty.isDecendantOf(comedy) - * // true - * ``` - */ - isDecendantOf: (other: Key) => boolean - - /** - * Returns wether this key has only one namespace. - */ - isTopLevel: () => boolean - - /** - * Returns the key with all parts in reversed order. 
- * - * @example - * ```js - * key.toString() - * // '/Comedy/MontyPython/Actor:JohnCleese' - * key.reverse().toString() - * // /Actor:JohnCleese/MontyPython/Comedy - * new Key('/Comedy/MontyPython/Actor:JohnCleese').reverse() - * ``` - */ - reverse: () => Key - - /** - * Concats one or more Keys into one new Key. - */ - concat: (...keys: Key[]) => Key - - /** - * Returns the array representation of this key. - * - * @example - * ```js - * key.toString() - * // '/Comedy/MontyPython/Actor:JohnCleese' - * key.list() - * // ['Comedy', 'MontyPythong', 'Actor:JohnCleese'] - * ``` - */ - list: () => string[] - toString: () => string -} - -export type Value = Uint8Array - -export interface Entry { - key: Key - value: Value -} diff --git a/packages/ipfs-core-types/src/dht/index.d.ts b/packages/ipfs-core-types/src/dht/index.d.ts new file mode 100644 index 0000000000..39ad3a91e6 --- /dev/null +++ b/packages/ipfs-core-types/src/dht/index.d.ts @@ -0,0 +1,44 @@ +import type { AbortOptions } from '../basic' +import type PeerId from 'peer-id' +import type Multiaddr from 'multiaddr' +import type CID from 'cids' + +export interface API { + findPeer: (peerId: PeerId, options?: AbortOptions) => Promise + findProvs: (cid: CID, options?: DHTFindProvsOptions) => Promise + get: (key: Uint8Array, options?: AbortOptions) => Promise + provide: (cid: CID | CID[], options?: DHTProvideOptions) => AsyncIterable + put: (key: Uint8Array, value: Uint8Array, options?: AbortOptions) => AsyncIterable + query: (peerId: PeerId | CID, options?: AbortOptions) => AsyncIterable +} + +export interface PeerResult { + id: string + addrs: Multiaddr[] +} + +export interface DHTFindProvsOptions extends AbortOptions { + numProviders: number +} + +export interface DHTProvideOptions extends AbortOptions { + recursive: boolean +} + +export enum QueryEventType { + SendingQuery = 1, + PeerResponse, + FinalPeer, + QueryError, + Provider, + Value, + AddingPeer, + DialingPeer +} + +export interface DHTQueryMessage { + 
extra: string + id: string + responses: PeerResult[] + type: QueryEventType +} diff --git a/packages/ipfs-core-types/src/files/index.d.ts b/packages/ipfs-core-types/src/files/index.d.ts new file mode 100644 index 0000000000..b91db33f90 --- /dev/null +++ b/packages/ipfs-core-types/src/files/index.d.ts @@ -0,0 +1,329 @@ +import { AbortOptions, IPFSPath } from '../basic' +import { ToMTime } from './files' +import CID, { CIDVersion } from 'cids' +import { CodecName } from 'multicodec' +import { HashName } from 'multihashes' +import { Mtime } from 'ipfs-unixfs' + +export interface API { + chmod: (path: string, mode: number | string, options?: ChmodOptions) => Promise + cp: (from: IPFSPath | IPFSPath[], to: string, options?: CpOptions) => Promise + mkdir: (path: string, options?: MkdirOptions) => Promise + stat: (ipfsPath: IPFSPath, options?: StatOptions) => Promise + touch: (ipfsPath: string, options?: TouchOptions) => Promise + rm: (ipfsPaths: string | string[], options?: RmOptions) => Promise + read: (ipfsPath: IPFSPath, options?: ResolveOptions) => AsyncIterable + write: (ipfsPath: string, content: string | Uint8Array | Blob | AsyncIterable | Iterable, options?: WriteOptions) => Promise + mv: (from: string | string[], to: string, options?: MvOptions) => Promise + flush: (ipfsPath: string, options?: AbortOptions) => Promise + ls: (ipfsPath: IPFSPath, options?: AbortOptions) => AsyncIterable +} + +export interface MFSEntry { + /** + * The object's name + */ + name: string + + /** + * The object's type (directory or file) + */ + type: 'directory' | 'file' + + /** + * The size of the file in bytes + */ + size: number + + /** + * The CID of the object + */ + cid: CID + + /** + * The UnixFS mode as a Number + */ + mode?: number + + /** + * An object with numeric secs and nsecs properties + */ + mtime?: Mtime +} + +export interface MFSOptions { + /** + * If true the changes will be immediately flushed to disk + */ + flush?: boolean +} + +export interface ChmodOptions 
extends MFSOptions, AbortOptions { + /** + * If true mode will be applied to the entire tree under path + */ + recursive?: boolean + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion +} + +export interface CpOptions extends MFSOptions, AbortOptions { + /** + * The value or node that was fetched during the get operation + */ + parents?: boolean + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion + + /** + * The threshold for splitting any modified folders into HAMT shards + */ + shardSplitThreshold?: number +} + +export interface MkdirOptions extends MFSOptions, AbortOptions { + /** + * If true, create intermediate directories + */ + parents?: boolean + + /** + * An integer that represents the file mode + */ + mode?: number + + /** + * A Date object, an object with { secs, nsecs } properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of process.hrtime() + */ + mtime?: ToMTime + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion + + /** + * The threshold for splitting any modified folders into HAMT shards + */ + shardSplitThreshold?: number +} + +export interface StatOptions extends AbortOptions { + /** + * If true, return only the CID + */ + hash?: boolean + + /** + * If true, return only the size + */ + size?: boolean + + /** + * If true, compute the amount of the DAG that is local and if possible the total size + */ + withLocal?: boolean +} + +export interface StatResult { + /** + * A CID instance + */ + cid: CID + + /** + * The file size in Bytes + */ + size: 
number + + /** + * The size of the DAGNodes making up the file in Bytes + */ + cumulativeSize: number + + /** + * Either directory or file + */ + type: 'directory' | 'file' + + /** + * If type is directory, this is the number of files in the directory. If it is file it is the number of blocks that make up the file + */ + blocks: number + + /** + * Indicates if locality information is present + */ + withLocality: boolean + + /** + * Indicates if the queried dag is fully present locally + */ + local?: boolean + + /** + * Indicates the cumulative size of the data present locally + */ + sizeLocal?: number + + /** + * UnixFS mode if applicable + */ + mode?: number + + /** + * UnixFS mtime if applicable + */ + mtime?: Mtime +} + +export interface TouchOptions extends MFSOptions, AbortOptions { + /** + * A Date object, an object with { secs, nsecs } properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of process.hrtime() + */ + mtime?: ToMTime + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion +} + +export interface RmOptions extends MFSOptions, AbortOptions { + /** + * If true all paths under the specifed path(s) will be removed + */ + recursive?: boolean + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion + + /** + * The threshold for splitting any modified folders into HAMT shards + */ + shardSplitThreshold?: number +} + +export interface ReadOptions extends AbortOptions { + /** + * An offset to start reading the file from + */ + offset?: number + + /** + * An optional max length to read from the file + */ + length?: number +} + +export interface WriteOptions extends MFSOptions, 
AbortOptions { + /** + * An offset within the file to start writing at + */ + offset?: number + + /** + * Optionally limit how many bytes are written + */ + length?: number + + /** + * Create the MFS path if it does not exist + */ + create?: boolean + + /** + * Create intermediate MFS paths if they do not exist + */ + parents?: boolean + + /** + * Truncate the file at the MFS path if it would have been larger than the passed content + */ + truncate?: boolean + + /** + * If true, DAG leaves will contain raw file data and not be wrapped in a protobuf + */ + rawLeaves?: boolean + + /** + * An integer that represents the file mode + */ + mode?: number + + /** + * A Date object, an object with { secs, nsecs } properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of process.hrtime() + */ + mtime?: ToMTime + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion + + /** + * The threshold for splitting any modified folders into HAMT shards + */ + shardSplitThreshold?: number +} + +export interface MvOptions extends MFSOptions, AbortOptions { + /** + * Create intermediate MFS paths if they do not exist + */ + parents?: boolean + + /** + * The hash algorithm to use for any updated entries + */ + hashAlg?: HashName + + /** + * The CID version to use for any updated entries + */ + cidVersion?: CIDVersion + + /** + * The threshold for splitting any modified folders into HAMT shards + */ + shardSplitThreshold?: number +} diff --git a/packages/ipfs-core-types/src/index.d.ts b/packages/ipfs-core-types/src/index.d.ts new file mode 100644 index 0000000000..5a8640a028 --- /dev/null +++ b/packages/ipfs-core-types/src/index.d.ts @@ -0,0 +1,49 @@ +import { API as RootAPI } from './root' +import { API as BitswapAPI } from 
'./bitswap' +import { API as BlockAPI } from './block' +import { API as BootstrapAPI } from './bootstrap' +import { API as ConfigAPI } from './config' +import { API as DAGAPI } from './dag' +import { API as DHTAPI } from './dht' +import { API as FilesAPI } from './files' +import { API as KeyAPI } from './key' +import { API as NameAPI } from './name' +import { API as ObjectAPI } from './object' +import { API as PinAPI } from './pin' +import { API as PubsubAPI } from './pubsub' +import { API as RefsAPI } from './refs' +import { API as RepoAPI } from './repo' +import { API as StatsAPI } from './stats' +import { API as SwarmAPI } from './swarm' +import { AbortOptions, Await, AwaitIterable } from './basic' + +export interface IPFS extends RootAPI { + bitswap: BitswapAPI + block: BlockAPI + bootstrap: BootstrapAPI + config: ConfigAPI + dag: DAGAPI + dht: DHTAPI + files: FilesAPI + key: KeyAPI + name: NameAPI + object: ObjectAPI + pin: PinAPI + pubsub: PubsubAPI + refs: Refs + repo: RepoAPI + stats: StatsAPI + swarm: SwarmAPI +} + +interface Refs extends RefsAPI["refs"] { + local: RefsAPI["local"] +} + +export type { + IPFS, + + AbortOptions, + Await, + AwaitIterable +} diff --git a/packages/ipfs-core-types/src/index.ts b/packages/ipfs-core-types/src/index.ts deleted file mode 100644 index b1898affd0..0000000000 --- a/packages/ipfs-core-types/src/index.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { RootAPI } from './root' -import { AbortOptions, Await, AwaitIterable } from './basic' - -export type { - RootAPI, - - AbortOptions, - Await, - AwaitIterable -} diff --git a/packages/ipfs-core-types/src/ipld.ts b/packages/ipfs-core-types/src/ipld.ts deleted file mode 100644 index c15adceb82..0000000000 --- a/packages/ipfs-core-types/src/ipld.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { BlockService } from './block-service' -import CID from 'cids' -import { Await, AwaitIterable, AbortOptions } from './basic' -import { StoreReader, StoreExporter, StoreEraser } from './store' -import { 
ResolveResult, Format } from './ipld/format' - -export interface IPLD extends - StoreReader, - StoreExporter, - StoreEraser { - put: (value: T, format: FormatCode, options?: PutOptions & AbortOptions) => Await - putMany: (values: AwaitIterable, format: FormatCode, options?: PutOptions) => AwaitIterable - - resolve: (cid: CID, path: string, options?: AbortOptions) => AwaitIterable> - tree: (cid: CID, path?: string, options?: TreeOptions & AbortOptions) => AwaitIterable - - addFormat: (format: Format) => IPLD - removeFormat: (format: Format) => IPLD - - defaultOptions: Options -} - -export type FormatCode = number -export type HashAlg = number - -export interface Options { - blockService?: BlockService - formats?: Record - - loadFormat?: (code: number|string) => Promise> -} - -export interface PutOptions { - hashAlg?: HashAlg - cidVersion?: 0|1 - onlyHash?: boolean - -} - -export interface TreeOptions { - recursive?: boolean -} diff --git a/packages/ipfs-core-types/src/ipld/format.ts b/packages/ipfs-core-types/src/ipld/format.ts deleted file mode 100644 index 6a459b8634..0000000000 --- a/packages/ipfs-core-types/src/ipld/format.ts +++ /dev/null @@ -1,42 +0,0 @@ - -import CID from 'cids' -import { Await } from '../basic' - -export interface Format { - util: Util - resolver: Resolver - - defaultHashArg: string | number - codec: string | number -} - -export interface Util { - /** - * Serialize an IPLD Node into a binary blob. - */ - serialize: (node: T) => Uint8Array - /** - * Deserialize a binary blob into an IPLD Node. - */ - deserialize: (bytes: Uint8Array) => T - - /** - * Calculate the CID of the binary blob. 
- */ - cid: (bytes: Uint8Array, options?: CIDOptions) => Await -} - -export interface CIDOptions { - cidVersion?: number - hashAlg?: number | string -} - -export interface Resolver { - resolve: (bytes: Uint8Array, path: string) => ResolveResult - tree: (byte: Uint8Array) => string[] -} - -export interface ResolveResult { - value: T - remainderPath: string -} diff --git a/packages/ipfs-core-types/src/key/index.d.ts b/packages/ipfs-core-types/src/key/index.d.ts new file mode 100644 index 0000000000..91bad43d6a --- /dev/null +++ b/packages/ipfs-core-types/src/key/index.d.ts @@ -0,0 +1,28 @@ +import type { AbortOptions } from '../basic' + +export interface API { + gen: (name: string, options?: GenOptions) => Promise + list: (options?: AbortOptions) => Promise + rm: (name: string, options?: AbortOptions) => Promise + rename: (oldName: string, newName: string, options?: AbortOptions) => Promise + export: (name: string, password: string, options?: AbortOptions) => Promise + import: (name: string, pem: string, password: string, options?: AbortOptions) => Promise + info: (name: string, options?: AbortOptions) => Promise +} + +export interface GenOptions extends AbortOptions { + type: string + size: number +} + +export interface Key { + id: string + name: string +} + +export interface RenameKeyResult { + id: string + was: string + now: string + overwrite: boolean +} diff --git a/packages/ipfs-core-types/src/name/index.d.ts b/packages/ipfs-core-types/src/name/index.d.ts new file mode 100644 index 0000000000..ef47f7a13e --- /dev/null +++ b/packages/ipfs-core-types/src/name/index.d.ts @@ -0,0 +1,94 @@ +import CID from 'cids'; +import type { AbortOptions } from '../basic' +import type { API as PubsubAPI } from './pubsub' + +export interface API { + /** + * IPNS is a PKI namespace, where names are the hashes of public keys, and + * the private key enables publishing new (signed) values. 
In both publish + * and resolve, the default name used is the node's own PeerID, + * which is the hash of its public key. + * + * @example + * ```js + * // The address of your files. + * const addr = '/ipfs/QmbezGequPwcsWo8UL4wDF6a8hYwM1hmbzYv2mnKkEWaUp' + * const res = await ipfs.name.publish(addr) + * // You now have a res which contains two fields: + * // - name: the name under which the content was published. + * // - value: the "real" address to which Name points. + * console.log(`https://gateway.ipfs.io/ipns/${res.name}`) + * ``` + */ + publish: (value: CID, options?: PublishOptions) => Promise + + /** + * Given a key, query the DHT for its best value. + * + * @example + * ```js + * // The IPNS address you want to resolve. + * const addr = '/ipns/ipfs.io' + * + * for await (const name of ipfs.name.resolve(addr)) { + * console.log(name) + * } + * // Logs: /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm + * ``` + */ + resolve: (value: string, options?: ResolveOptions) => AsyncIterable + + pubsub: PubsubAPI +} + +export interface PublishOptions extends AbortOptions { + /** + * Resolve given path before publishing + */ + resolve?: boolean + /** + * Time duration of the record + */ + lifetime?: string + /** + * Time duration this record should be cached + */ + ttl?: string + /** + * Name of the key to be used + */ + key?: string + /** + * When offline, save the IPNS record + * to the the local datastore without broadcasting to the network instead of + * simply failing. + * + * This option is not yet implemented in js-ipfs. See tracking issue [ipfs/js-ipfs#1997] + * (https://github.com/ipfs/js-ipfs/issues/1997). 
+ */ + allowOffline?: boolean +} + +export interface PublishResult { + /** + * The published IPNS name + */ + name: string + + /** + * The IPNS record + */ + value: Uint8Array +} + +export interface ResolveOptions extends AbortOptions { + /** + * resolve until the result is not an IPNS name + */ + recursive?: boolean + + /** + * do not use cached entries + */ + nocache?: boolean +} diff --git a/packages/ipfs-core-types/src/name/pubsub/index.d.ts b/packages/ipfs-core-types/src/name/pubsub/index.d.ts new file mode 100644 index 0000000000..3a0ea16a00 --- /dev/null +++ b/packages/ipfs-core-types/src/name/pubsub/index.d.ts @@ -0,0 +1,16 @@ +import CID from 'cids'; +import type { AbortOptions } from '../../basic' + +export interface API { + cancel: (name: string, options?: AbortOptions) => Promise + state: (options?: AbortOptions) => Promise + subs: (options?: AbortOptions) => Promise +} + +export interface PubsubCancelResult { + canceled: boolean +} + +export interface PubsubStateResult { + enabled: boolean +} diff --git a/packages/ipfs-core-types/src/object/index.d.ts b/packages/ipfs-core-types/src/object/index.d.ts new file mode 100644 index 0000000000..bda659a276 --- /dev/null +++ b/packages/ipfs-core-types/src/object/index.d.ts @@ -0,0 +1,34 @@ +import CID from 'cids'; +import type { AbortOptions, PreloadOptions } from '../basic' +import type { DAGNode, DAGNodeLike, DAGLink } from 'ipld-dag-pb' +import type { API as PatchAPI } from './patch' + +export interface API { + new: (options?: NewObjectOptions) => Promise + put: (obj: DAGNode | DAGNodeLike | Uint8Array, options?: PutOptions) => Promise + get: (cid: CID, options?: AbortOptions & PreloadOptions) => Promise + data: (cid: CID, options?: AbortOptions & PreloadOptions) => Promise + links: (cid: CID, options?: AbortOptions & PreloadOptions) => Promise + stat: (cid: CID, options?: AbortOptions & PreloadOptions) => Promise + + patch: PatchAPI +} + +export interface NewObjectOptions extends AbortOptions, PreloadOptions { + 
template?: 'unixfs-dir' +} + +export interface PutOptions extends AbortOptions, PreloadOptions { + enc?: PutEncoding +} + +export interface StatResult { + Hash: string + NumLinks: number + BlockSize: number + LinksSize: number + DataSize: number + CumulativeSize: number +} + +export type PutEncoding = 'json' | 'protobuf' \ No newline at end of file diff --git a/packages/ipfs-core-types/src/object/patch/index.d.ts b/packages/ipfs-core-types/src/object/patch/index.d.ts new file mode 100644 index 0000000000..7993d331b8 --- /dev/null +++ b/packages/ipfs-core-types/src/object/patch/index.d.ts @@ -0,0 +1,10 @@ +import type CID from 'cids'; +import type { AbortOptions } from '../../basic' +import type { DAGLink } from 'ipld-dag-pb' + +export interface API { + addLink: (cid: CID, link: DAGLink, options?: AbortOptions) => Promise + rmLink: (cid: CID, link: DAGLink, options?: AbortOptions) => Promise + appendData: (cid: CID, data: Uint8Array, options?: AbortOptions) => Promise + setData: (cid: CID, data: Uint8Array, options?: AbortOptions) => Promise +} diff --git a/packages/ipfs-core-types/src/pin/index.d.ts b/packages/ipfs-core-types/src/pin/index.d.ts new file mode 100644 index 0000000000..ea12573991 --- /dev/null +++ b/packages/ipfs-core-types/src/pin/index.d.ts @@ -0,0 +1,92 @@ +import type { AbortOptions, AwaitIterable } from '../basic' +import type CID from 'cids' +import type { API as remote } from './remote' + +export interface API { + add: (cid: string | CID, options?: AddOptions) => Promise + addAll: (source: AwaitIterable, options?: AddAllOptions) => AsyncIterable + ls: (options?: LsOptions) => AsyncIterable + rm: (ipfsPath: string | CID, options?: RmOptions) => Promise + rmAll: (source: AwaitIterable, options?: AbortOptions) => AsyncIterable + + remote +} + +export interface AddOptions extends AbortOptions { + /** + * If true, pin all blocked linked to from the pinned CID + */ + recursive?: boolean + + /** + * Whether to preload all blocks pinned during this 
operation + */ + preload?: boolean + + /** + * Internal option used to control whether to create a repo write lock during a pinning operation + */ + lock?: boolean +} + +export interface AddAllOptions extends AbortOptions { + lock?: boolean + + /** + * Whether to preload all blocks pinned during this operation + */ + preload?: boolean + + /** + * Internal option used to control whether to create a repo write lock during a pinning operation + */ + lock?: boolean +} + +export interface AddInput { + /** + * A CID to pin - nb. you must pass either `cid` or `path`, not both + */ + cid?: CID + + /** + * An IPFS path to pin - nb. you must pass either `cid` or `path`, not both + */ + path?: string + + /** + * If true, pin all blocked linked to from the pinned CID + */ + recursive?: boolean + + /** + * A human readable string to store with this pin + */ + comments?: string +} + +export type PinType = 'recursive' | 'direct' | 'indirect' | 'all' + +export type PinQueryType = 'recursive' | 'direct' | 'indirect' | 'all' + +export interface LsOptions extends AbortOptions { + paths?: CID | CID[] | string | string[] + type?: PinQueryType +} + +export interface LsResult { + cid: CID + type: PinType | string + metadata?: Record +} + +export interface RmOptions extends AbortOptions { + recursive?: boolean +} + +export interface RmAllInput { + cid?: CID + path?: string + recursive?: boolean +} + diff --git a/packages/ipfs-core-types/src/pin/remote.ts b/packages/ipfs-core-types/src/pin/remote/index.d.ts similarity index 95% rename from packages/ipfs-core-types/src/pin/remote.ts rename to packages/ipfs-core-types/src/pin/remote/index.d.ts index 7dde960720..43b1324b8e 100644 --- a/packages/ipfs-core-types/src/pin/remote.ts +++ b/packages/ipfs-core-types/src/pin/remote/index.d.ts @@ -1,7 +1,7 @@ import CID from 'cids' import Multiaddr from 'multiaddr' -import { API as Service } from './remote/service' -import { AbortOptions } from '../basic' +import { API as Service } from './service' 
+import { AbortOptions } from '../../basic' export interface API { /** diff --git a/packages/ipfs-core-types/src/pin/remote/service.ts b/packages/ipfs-core-types/src/pin/remote/service/index.d.ts similarity index 96% rename from packages/ipfs-core-types/src/pin/remote/service.ts rename to packages/ipfs-core-types/src/pin/remote/service/index.d.ts index 7368f14403..f221b14091 100644 --- a/packages/ipfs-core-types/src/pin/remote/service.ts +++ b/packages/ipfs-core-types/src/pin/remote/service/index.d.ts @@ -1,4 +1,4 @@ -import { AbortOptions } from '../../basic' +import { AbortOptions } from '../../../basic' export interface API { /** diff --git a/packages/ipfs-core-types/src/preload.ts b/packages/ipfs-core-types/src/preload.ts deleted file mode 100644 index ea62ac506d..0000000000 --- a/packages/ipfs-core-types/src/preload.ts +++ /dev/null @@ -1,3 +0,0 @@ -export interface Options { - preload?: boolean -} diff --git a/packages/ipfs-core-types/src/pubsub/index.d.ts b/packages/ipfs-core-types/src/pubsub/index.d.ts new file mode 100644 index 0000000000..8af4c6da60 --- /dev/null +++ b/packages/ipfs-core-types/src/pubsub/index.d.ts @@ -0,0 +1,18 @@ +import type { AbortOptions } from '../basic' + +export interface API { + subscribe: (topic: string, handler: MessageHandlerFn, options?: AbortOptions) => Promise + unsubscribe: (topic: string, handler: MessageHandlerFn, options?: AbortOptions) => Promise + publish: (topic: string, data: string | Uint8Array, options?: AbortOptions) => Promise + ls: (options?: AbortOptions) => Promise + peers: (topic: string, options?: AbortOptions) => Promise +} + +export interface Message { + from: string + seqno: Uint8Array + data: Uint8Array + topicIDs: string[] +} + +export type MessageHandlerFn = (message: Message) => void diff --git a/packages/ipfs-core-types/src/refs/index.d.ts b/packages/ipfs-core-types/src/refs/index.d.ts new file mode 100644 index 0000000000..41ee48294a --- /dev/null +++ b/packages/ipfs-core-types/src/refs/index.d.ts 
@@ -0,0 +1,24 @@ +import type { AbortOptions, PreloadOptions } from '../basic' +import type CID from 'cids' + +export type API = { + refs: Refs + local: Local +} + +export type Refs = (ipfsPath: string | CID, options?: RefsOptions) => AsyncIterable + +export interface RefsOptions extends AbortOptions, PreloadOptions { + recursive?: boolean + unique?: boolean + format?: string + edges?: boolean + maxDepth?: number +} + +export type Local = (options?: AbortOptions) => AsyncIterable + +export interface RefsResult { + ref: string + error?: Error +} diff --git a/packages/ipfs-core-types/src/repo.ts b/packages/ipfs-core-types/src/repo.ts deleted file mode 100644 index ca208d5be1..0000000000 --- a/packages/ipfs-core-types/src/repo.ts +++ /dev/null @@ -1,102 +0,0 @@ -import CID from 'cids' -import { Block } from './block-service' -import { ToJSON, Await, AbortOptions } from './basic' -import { DataStore, Key } from './datastore' -import { - ValueStore, StoreReader, Resource, StoreLookup, - StoreImporter, StoreExporter, StoreEraser, StoreSelector, - KeyValueStore -} from './store' - -export interface Repo extends Resource { - readonly path: string - closed: boolean - - /** - * Initializes necessary structures inside the repo - */ - init: (config: Partial) => Await - - /** - * Tells whether this repo exists or not. - */ - exists: () => Await - - /** - * Tells whether the repo has been initialized. - */ - isInitialized: () => Await - - /** - * Gets the repo status. 
- */ - stat: (options?: AbortOptions) => Await - - root: KeyValueStore - - blocks: BlockStore - datastore: DataStore - - pins: PinStore - config: ConfigStore - keys: KeyStore - - version: ValueStore - apiAddr: ValueStore -} - -export interface RepoStatus { - numObjects: number - repoPath: string - repoSize: number - version: number - storageMax: number -} - -interface BlockStore extends - StoreImporter, - StoreReader, - StoreLookup, - StoreExporter, - StoreEraser, - StoreSelector { - put: (block: Block) => Await -} - -export interface ConfigStore extends - StoreReader { - /** - * Set a config `value`, where `value` can be anything that is serializable - * to JSON. - */ - set: (key: string, value: ToJSON, options?: AbortOptions) => Await - - /** - * Set the whole `config` which can be a any value that is serializable to - * JSON. - * - * @param config - */ - replace: (config: Config, options?: AbortOptions) => Await - - /** - * Get the entire config value. - */ - getAll: (options?: AbortOptions) => Await - - /** - * Whether the config sub-repo exists. 
- */ - exists: () => Await -} - -export interface PinStore extends - KeyValueStore, - Object { -} - -export interface KeyStore extends - KeyValueStore, - Object { - -} diff --git a/packages/ipfs-core-types/src/repo/index.d.ts b/packages/ipfs-core-types/src/repo/index.d.ts new file mode 100644 index 0000000000..945609485b --- /dev/null +++ b/packages/ipfs-core-types/src/repo/index.d.ts @@ -0,0 +1,35 @@ +import type { AbortOptions } from '../basic' +import type BigInteger from 'bignumber.js' +import CID from 'cids' + +export interface API { + gc: (options?: GCOptions) => AsyncIterable + stat: (options?: StatOptions) => Promise + version: (options?: AbortOptions) => Promise +} + +export interface GCOptions extends AbortOptions { + quiet?: boolean +} + +export interface GCError { + err: Error +} + +export interface GCSuccess { + cid: CID +} + +export type GCResult = GCSuccess | GCError + +export interface StatOptions extends AbortOptions { + human: boolean +} + +export interface StatResult { + numObjects: BigInteger + repoPath: string + repoSize: BigInteger + version: number + storageMax: BigInteger +} diff --git a/packages/ipfs-core-types/src/root.ts b/packages/ipfs-core-types/src/root.d.ts similarity index 50% rename from packages/ipfs-core-types/src/root.ts rename to packages/ipfs-core-types/src/root.d.ts index 5472b2bf89..ddd279aa90 100644 --- a/packages/ipfs-core-types/src/root.ts +++ b/packages/ipfs-core-types/src/root.d.ts @@ -1,18 +1,63 @@ -import { AbortOptions } from './basic' -import { Options as PreloadOptions } from './preload' -import { ImportSource, IPFSEntry, ToEntry, UnixFSEntry } from './files' -import CID, { CIDVersion } from 'cids' - -export interface RootAPI { - add: (entry: ToEntry, options?: AddOptions) => Promise - addAll: (source: ImportSource, options?: AddAllOptions & AbortOptions) => AsyncIterable +import type { AbortOptions, PreloadOptions, IPFSPath, ImportSource, ToEntry, IPFSEntry } from './basic' +import type CID, { CIDVersion } from 
'cids' +import type { ImportResult } from 'ipfs-unixfs-importer' +import type PeerId from 'peer-id' +import type Multiaddr from 'multiaddr' +import type { BaseName } from 'multibase' + +export interface API { + add: (entry: ToEntry, options?: AddOptions) => Promise + addAll: (source: ImportSource, options?: AddAllOptions & AbortOptions) => AsyncIterable cat: (ipfsPath: IPFSPath, options?: CatOptions) => AsyncIterable get: (ipfsPath: IPFSPath, options?: GetOptions) => AsyncIterable - ls: (ipfsPath: IPFSPath, options?: ListOptions) => AsyncIterable + + id: (options?: AbortOptions) => Promise + version: (options?: AbortOptions) => Promise + dns: (domain: string, options?: DNSOptions) => Promise + start: () => Promise + stop: (options?: AbortOptions) => Promise + ping: (peerId: PeerId | CID, options?: PingOptions) => AsyncIterable + resolve: (name: string, options?: ResolveOptions) => Promise + isOnline: () => boolean } -export interface AddProgressFn { (bytes: number, path: string): void } +export interface File { + readonly type: 'file' + readonly cid: CID + readonly name: string + + /** + * File path + */ + readonly path: string + /** + * File content + */ + readonly content?: AsyncIterable + mode?: number + mtime?: Mtime + size: number + depth: number +} + +export interface Directory { + type: 'dir' + cid: CID + name: string + /** + * Directory path + */ + path: string + mode?: number + mtime?: Mtime + size: number + depth: number +} + +export type IPFSEntry = File | Directory + +export type AddProgressFn = (bytes: number, path?: string) => void export interface AddOptions extends AbortOptions { /** @@ -65,6 +110,10 @@ export interface AddOptions extends AbortOptions { */ wrapWithDirectory?: boolean + /** + * Whether to preload all blocks created during this operation + */ + preload?: boolean } export interface AddAllOptions extends AddOptions { @@ -105,7 +154,46 @@ export interface ListOptions extends AbortOptions, PreloadOptions { includeContent?: boolean } 
+export interface IDResult { + id: string + publicKey: string + addresses: Multiaddr[] + agentVersion: string + protocolVersion: string + protocols: string[] +} + /** - * An IPFS path or CID + * An object with the version information for the implementation, + * the commit and the Repo. `js-ipfs` instances will also return + * the version of `interface-ipfs-core` and `ipfs-http-client` + * supported by this node */ -export type IPFSPath = CID | string +export interface VersionResult { + version: string + commit?: string + repo?: string + system?: string + golang?: string + 'interface-ipfs-core'?: string + 'ipfs-http-client'?: string +} + +export interface DNSOptions extends AbortOptions { + recursive?: boolean +} + +export interface PingOptions extends AbortOptions { + count?: number +} + +export interface PingResult { + success: boolean + time: number + text: string +} + +export interface ResolveOptions extends AbortOptions { + recursive?: boolean + cidBase?: BaseName +} diff --git a/packages/ipfs-core-types/src/stats/index.d.ts b/packages/ipfs-core-types/src/stats/index.d.ts new file mode 100644 index 0000000000..59e4839561 --- /dev/null +++ b/packages/ipfs-core-types/src/stats/index.d.ts @@ -0,0 +1,26 @@ +import type { AbortOptions } from '../basic' +import { API as BitswapAPI } from '../bitswap' +import { API as RepoAPI } from '../repo' +import type PeerId from 'peer-id' +import type CID from 'cid' +import type BigInteger from 'bignumber.js' + +export interface API { + bitswap: BitswapAPI["stat"] + repo: RepoAPI["stat"] + bw: (options?: BWOptions) => AsyncIterable +} + +export interface BWOptions extends AbortOptions { + peer?: PeerId | CID | string + proto?: string + poll?: boolean + interval?: number +} + +export interface BWResult { + totalIn: BigInteger + totalOut: BigInteger + rateIn: BigInteger + rateOut: BigInteger +} diff --git a/packages/ipfs-core-types/src/store.ts b/packages/ipfs-core-types/src/store.ts deleted file mode 100644 index 
4a8c5048b4..0000000000 --- a/packages/ipfs-core-types/src/store.ts +++ /dev/null @@ -1,124 +0,0 @@ - -import type { Await, AwaitIterable, AbortOptions } from './basic' - -export interface ValueStore { - get: (options?: AbortOptions) => Await - set: (value: T) => Await -} - -export interface KeyValueStore extends - StoreReader, - StoreExporter, - StoreSelector, - StoreLookup, - StoreWriter, - StoreImporter, - StoreEraser { -} - -// Interface Datastore - -export interface StoreReader { - /** - * The key retrieve the value for. - */ - get: (key: Key, options?: AbortOptions) => Await -} - -export interface StoreLookup { - /** - * Check for the existence of a given key - */ - has: (key: Key, options?: AbortOptions) => Await -} - -export interface StoreExporter { - /** - * Retrieve a stream of values stored under the given keys. - */ - getMany: (keys: AwaitIterable, options?: AbortOptions) => AwaitIterable -} - -export interface StoreSelector { - /** - * Search the store for some values. - */ - query: (query: Query, options?: AbortOptions) => AwaitIterable -} - -export interface StoreWriter { - /** - * Store a value with the given key. - */ - put: (key: Key, value: Value, options?: AbortOptions) => Await -} - -export interface StoreImporter { - /** - * Store many key-value pairs. - */ - putMany: (entries: AwaitIterable, options?: AbortOptions) => AwaitIterable -} - -export interface StoreEraser { - /** - * Delete the content stored under the given key. - */ - delete: (key: Key, options?: AbortOptions) => Await - /** - * Delete the content stored under the given keys. 
- */ - deleteMany: (keys: AwaitIterable, options?: AbortOptions) => AwaitIterable - -} -export interface StoreBatch { - batch: () => Batch -} - -export interface Batch { - put: (key: Key, value: Value) => void - delete: (key: Key) => void - - commit: (options?: AbortOptions) => Await -} - -export interface Resource { - /** - * Opens the datastore, this is only needed if the store was closed before, - * otherwise this is taken care of by the constructor. - */ - open: () => Await - /** - * Close the datastore, this should always be called to ensure resources - * are cleaned up. - */ - close: () => Await -} - -export interface Query { - /** - * Only return values where the key starts with this prefix - */ - prefix?: string - /** - * Filter the results according to the these functions - */ - filters?: Array<(resut: Entry) => boolean> - /** - * Order the results according to these functions - */ - orders?: Array<(results: Entry[]) => Entry[]> - /** - * Only return this many records - */ - limit?: number - /** - * An options object, all properties are optional - */ - options?: Options - /** - * A way to signal that the caller is no longer interested in the outcome of - * this operation - */ - signal?: AbortSignal -} diff --git a/packages/ipfs-core-types/src/swarm/index.d.ts b/packages/ipfs-core-types/src/swarm/index.d.ts new file mode 100644 index 0000000000..7115a3462e --- /dev/null +++ b/packages/ipfs-core-types/src/swarm/index.d.ts @@ -0,0 +1,41 @@ +import type { AbortOptions } from '../basic' +import { API as BitswapAPI } from '../bitswap' +import { API as RepoAPI } from '../repo' +import type PeerId from 'peer-id' +import type CID from 'cid' +import type BigInteger from 'bignumber.js' +import type Multiaddr from 'multiaddr' + +export interface API { + addrs: (options?: AbortOptions) => Promise + connect: (addr: Multiaddr, options?: AbortOptions) => Promise + disconnect: (addr: Multiaddr, options?: AbortOptions) => Promise + localAddrs: (options?: AbortOptions) => 
Promise + peers: (options?: PeersOptions) => Promise +} + +export interface AddrsResult { + id: string + addrs: Multiaddr[] +} + +export interface PeersOptions extends AbortOptions { + direction?: boolean + streams?: boolean + verbose?: boolean + latency?: boolean +} + +export enum PeerDirection { + Inbound = 0, + Outbound +} + +export interface PeersResult { + addr: Multiaddr + peer: string + latency?: string + muxer?: string + streams?: string[] + direction?: PeerDirection +} diff --git a/packages/ipfs-core-types/tsconfig.json b/packages/ipfs-core-types/tsconfig.json index f090f172de..60b3530b82 100644 --- a/packages/ipfs-core-types/tsconfig.json +++ b/packages/ipfs-core-types/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "noImplicitAny": true + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src" diff --git a/packages/ipfs-core-utils/package.json b/packages/ipfs-core-utils/package.json index 720dc04294..a40cbb7584 100644 --- a/packages/ipfs-core-utils/package.json +++ b/packages/ipfs-core-utils/package.json @@ -24,7 +24,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "aegir build --no-bundle", "test": "aegir test", "test:browser": "aegir test -t browser", "test:electron": "aegir test -t electron-main", @@ -40,7 +39,7 @@ "blob-to-it": "^1.0.1", "browser-readablestream-to-it": "^1.0.1", "cids": "^1.1.5", - "err-code": "^2.0.3", + "err-code": "^3.0.1", "ipfs-core-types": "^0.3.1", "ipfs-utils": "^6.0.1", "it-all": "^1.0.4", diff --git a/packages/ipfs-core-utils/src/files/format-mode.js b/packages/ipfs-core-utils/src/files/format-mode.js index 304ccc3464..53158d975f 100644 --- a/packages/ipfs-core-utils/src/files/format-mode.js +++ b/packages/ipfs-core-utils/src/files/format-mode.js @@ -16,6 +16,12 @@ const S_IROTH = parseInt('4', 8) // others have read permission const S_IWOTH = 
parseInt('2', 8) // others have write permission const S_IXOTH = parseInt('1', 8) // others have execute permission +/** + * @param {number} mode + * @param {number} perm + * @param {string} type + * @param {string[]} output + */ function checkPermission (mode, perm, type, output) { if ((mode & perm) === perm) { output.push(type) @@ -25,8 +31,7 @@ function checkPermission (mode, perm, type, output) { } /** - * - * @param {import('ipfs-core-types/src/files').Mode} mode + * @param {number} mode * @param {boolean} isDirectory * @returns {string} */ diff --git a/packages/ipfs-core-utils/src/files/format-mtime.js b/packages/ipfs-core-utils/src/files/format-mtime.js index 8195600efa..0e1d0b31ae 100644 --- a/packages/ipfs-core-utils/src/files/format-mtime.js +++ b/packages/ipfs-core-utils/src/files/format-mtime.js @@ -1,7 +1,7 @@ 'use strict' /** - * @param {import('ipfs-core-types/src/files').MTime} mtime + * @param {import('ipfs-unixfs').Mtime} mtime * @returns {string} */ function formatMtime (mtime) { @@ -9,7 +9,7 @@ function formatMtime (mtime) { return '-' } - const date = new Date((mtime.secs * 1000) + Math.round(mtime.nsecs / 1000)) + const date = new Date((mtime.secs * 1000) + Math.round((mtime.nsecs || 0) / 1000)) return date.toLocaleDateString(Intl.DateTimeFormat().resolvedOptions().locale, { year: 'numeric', diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js b/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js index e937dd1fbd..c5d11fccb1 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js @@ -3,6 +3,11 @@ const normaliseContent = require('./normalise-content.browser') const normaliseInput = require('./normalise-input') +/** + * @typedef {import('ipfs-core-types/src/basic').ImportSource} ImportSource + * @typedef {import('ipfs-core-types/src/basic').BrowserImportCandidate} BrowserImportCandidate + */ + /** * 
Transforms any of the `ipfs.add` input types into * @@ -12,7 +17,8 @@ const normaliseInput = require('./normalise-input') * * See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options * - * @param {import('ipfs-core-types/src/files').ImportSource} input - * @returns {AsyncIterable>} + * @param {ImportSource} input + * @returns {AsyncGenerator} */ +// @ts-ignore module.exports = (input) => normaliseInput(input, normaliseContent) diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.js b/packages/ipfs-core-utils/src/files/normalise-input/index.js index 3e30de6ce4..aea7eaa9fd 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.js @@ -3,6 +3,10 @@ const normaliseContent = require('./normalise-content') const normaliseInput = require('./normalise-input') +/** + * @typedef {import('ipfs-core-types/src/basic').ImportSource} ImportSource + */ + /** * Transforms any of the `ipfs.add` input types into * @@ -12,7 +16,6 @@ const normaliseInput = require('./normalise-input') * * See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options * - * @param {import('ipfs-core-types/src/files').ImportSource} input - * @returns {AsyncIterable>>} + * @param {ImportSource} input */ module.exports = (input) => normaliseInput(input, normaliseContent) diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js index a2bdc54fea..10efa46351 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js @@ -13,7 +13,6 @@ const { /** * @param {import('./normalise-input').ToContent} input - * @returns {Promise} */ async function toBlob (input) { // Bytes @@ -37,7 +36,7 @@ async function toBlob (input) { } // 
(Async)Iterator - if (input[Symbol.iterator] || input[Symbol.asyncIterator]) { + if (Symbol.iterator in input || Symbol.asyncIterator in input) { /** @type {any} peekable */ const peekable = itPeekable(input) @@ -67,7 +66,6 @@ async function toBlob (input) { /** * @param {AsyncIterable|Iterable} stream - * @returns {Promise} */ async function itToBlob (stream) { const parts = [] diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js index db205d62c5..a959547474 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js @@ -15,7 +15,6 @@ const { /** * @param {import('./normalise-input').ToContent} input - * @returns {AsyncIterable} */ async function * toAsyncIterable (input) { // Bytes | String @@ -41,7 +40,7 @@ async function * toAsyncIterable (input) { } // (Async)Iterator - if (input[Symbol.iterator] || input[Symbol.asyncIterator]) { + if (Symbol.iterator in input || Symbol.asyncIterator in input) { /** @type {any} peekable */ const peekable = itPeekable(input) @@ -73,9 +72,7 @@ async function * toAsyncIterable (input) { } /** - * * @param {ArrayBuffer | ArrayBufferView | string | InstanceType | number[]} chunk - * @returns {Uint8Array} */ function toBytes (chunk) { if (chunk instanceof Uint8Array) { diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js index 79365847d8..23b22016d1 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js @@ -13,16 +13,14 @@ const { modeToNumber } = require('./utils') -// eslint-disable-next-line complexity - /** - * @typedef {import('ipfs-core-types/src/files').ToContent} ToContent + * @typedef 
{import('ipfs-core-types/src/basic').ToContent} ToContent + * @typedef {import('ipfs-unixfs-importer').ImportCandidate} ImportCandidate */ + /** - * @template {Blob|AsyncIterable} Content - * @param {import('ipfs-core-types/src/files').ImportSource} input - * @param {(content:ToContent) => Content|Promise} normaliseContent - * @returns {AsyncIterable>} + * @param {import('ipfs-core-types/src/basic').ImportSource} input + * @param {(content:ToContent) => AsyncIterable} normaliseContent */ // eslint-disable-next-line complexity module.exports = async function * normaliseInput (input, normaliseContent) { @@ -50,7 +48,7 @@ module.exports = async function * normaliseInput (input, normaliseContent) { } // Iterable - if (input[Symbol.iterator] || input[Symbol.asyncIterator]) { + if (Symbol.iterator in input || Symbol.asyncIterator in input) { /** @type {any} peekable */ const peekable = itPeekable(input) @@ -102,16 +100,19 @@ module.exports = async function * normaliseInput (input, normaliseContent) { } /** - * @template {Blob|AsyncIterable} Content - * @param {import('ipfs-core-types/src/files').ToEntry} input - * @param {(content:ToContent) => Content|Promise} normaliseContent - * @returns {Promise>} + * @param {import('ipfs-core-types/src/basic').ToEntry} input + * @param {(content:ToContent) => AsyncIterable} normaliseContent */ async function toFileObject (input, normaliseContent) { // @ts-ignore - Those properties don't exist on most input types const { path, mode, mtime, content } = input - const file = { path: path || '', mode: modeToNumber(mode), mtime: mtimeToObject(mtime) } + /** @type {ImportCandidate} */ + const file = { + path: path || '', + mode: modeToNumber(mode), + mtime: mtimeToObject(mtime) + } if (content) { file.content = await normaliseContent(content) diff --git a/packages/ipfs-core-utils/src/files/normalise-input/utils.js b/packages/ipfs-core-utils/src/files/normalise-input/utils.js index defceec5e8..d2f1646a06 100644 --- 
a/packages/ipfs-core-utils/src/files/normalise-input/utils.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/utils.js @@ -22,7 +22,7 @@ function isBlob (obj) { * An object with a path or content property * * @param {any} obj - * @returns {obj is import('ipfs-core-types/src/files').ToEntry} + * @returns {obj is import('ipfs-core-types/src/basic').ToEntry} */ function isFileObject (obj) { return typeof obj === 'object' && (obj.path || obj.content) @@ -95,8 +95,7 @@ function mtimeToObject (mtime) { } /** - * @param {any} mode - * @returns {number|undefined} + * @param {number | string | null | undefined} mode */ function modeToNumber (mode) { if (mode == null) { diff --git a/packages/ipfs-core-utils/src/pins/normalise-input.js b/packages/ipfs-core-utils/src/pins/normalise-input.js index fc4299b7ce..e7dc21dd24 100644 --- a/packages/ipfs-core-utils/src/pins/normalise-input.js +++ b/packages/ipfs-core-utils/src/pins/normalise-input.js @@ -3,6 +3,22 @@ const errCode = require('err-code') const CID = require('cids') +/** + * @typedef {Object} Pinnable + * @property {string | InstanceType | CID} [path] + * @property {CID} [cid] + * @property {boolean} [recursive] + * @property {any} [metadata] + * + * @typedef {CID|string|InstanceType|Pinnable} ToPin + * @typedef {ToPin|Iterable|AsyncIterable} Source + * + * @typedef {Object} Pin + * @property {string|CID} path + * @property {boolean} recursive + * @property {any} [metadata] + */ + /** * Transform one of: * @@ -55,7 +71,8 @@ module.exports = async function * normaliseInput (input) { } // Iterable - if (input[Symbol.iterator]) { + if (Symbol.iterator in input) { + // @ts-ignore const iterator = input[Symbol.iterator]() const first = iterator.next() if (first.done) return iterator @@ -82,7 +99,8 @@ module.exports = async function * normaliseInput (input) { } // AsyncIterable - if (input[Symbol.asyncIterator]) { + if (Symbol.asyncIterator in input) { + // @ts-ignore const iterator = input[Symbol.asyncIterator]() 
const first = await iterator.next() if (first.done) return iterator @@ -112,12 +130,18 @@ module.exports = async function * normaliseInput (input) { } /** - * @param {ToPinWithPath|ToPinWithCID} input - * @returns {Pin} + * @param {Pinnable} input */ function toPin (input) { + const path = input.cid || `${input.path}` + + if (!path) { + throw errCode(new Error('Unexpected input: Please path either a CID or an IPFS path'), 'ERR_UNEXPECTED_INPUT') + } + + /** @type {Pin} */ const pin = { - path: input.path == null ? input.cid : `${input.path}`, + path, recursive: input.recursive !== false } @@ -127,25 +151,3 @@ function toPin (input) { return pin } - -/** - * @typedef {Object} ToPinWithPath - * @property {string | InstanceType | CID} path - * @property {undefined} [cid] - * @property {boolean} [recursive] - * @property {any} [metadata] - * - * @typedef {Object} ToPinWithCID - * @property {undefined} [path] - * @property {CID} cid - * @property {boolean} [recursive] - * @property {any} [metadata] - * - * @typedef {CID|string|InstanceType|ToPinWithPath|ToPinWithPath} ToPin - * @typedef {ToPin|Iterable|AsyncIterable} Source - * - * @typedef {Object} Pin - * @property {string|CID} path - * @property {boolean} recursive - * @property {any} [metadata] - */ diff --git a/packages/ipfs-core-utils/src/to-url-string.js b/packages/ipfs-core-utils/src/to-url-string.js index c74f0e601a..eb03bd0c69 100644 --- a/packages/ipfs-core-utils/src/to-url-string.js +++ b/packages/ipfs-core-utils/src/to-url-string.js @@ -1,6 +1,7 @@ 'use strict' const multiaddr = require('multiaddr') +// @ts-ignore const multiAddrToUri = require('multiaddr-to-uri') /** diff --git a/packages/ipfs-core-utils/src/with-timeout-option.js b/packages/ipfs-core-utils/src/with-timeout-option.js index 70e159263f..ccbca756cb 100644 --- a/packages/ipfs-core-utils/src/with-timeout-option.js +++ b/packages/ipfs-core-utils/src/with-timeout-option.js @@ -1,6 +1,7 @@ /* eslint-disable no-unreachable */ 'use strict' +// 
@ts-ignore const TimeoutController = require('timeout-abort-controller') const { anySignal } = require('any-signal') const { default: parseDuration } = require('parse-duration') @@ -53,9 +54,11 @@ function withTimeoutOption (fn, optionsArgIndex) { } } + // @ts-ignore if (fnRes[Symbol.asyncIterator]) { // @ts-ignore return (async function * () { + // @ts-ignore const it = fnRes[Symbol.asyncIterator]() try { diff --git a/packages/ipfs-core-utils/tsconfig.json b/packages/ipfs-core-utils/tsconfig.json index 979a39adab..8a6208361c 100644 --- a/packages/ipfs-core-utils/tsconfig.json +++ b/packages/ipfs-core-utils/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-core/package.json b/packages/ipfs-core/package.json index 0f80960762..c6b5eccc80 100644 --- a/packages/ipfs-core/package.json +++ b/packages/ipfs-core/package.json @@ -38,7 +38,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "npm run build", "build": "aegir build", "lint": "aegir lint", "test": "aegir test", @@ -56,35 +55,34 @@ "abort-controller": "^3.0.0", "array-shuffle": "^2.0.0", "bignumber.js": "^9.0.1", - "cbor": "^6.0.1", + "cborg": "^1.1.0", "cids": "^1.1.5", - "class-is": "^1.1.0", "dag-cbor-links": "^2.0.0", "datastore-core": "^3.0.0", - "datastore-pubsub": "^0.4.1", + "datastore-pubsub": "ipfs/js-datastore-pubsub#chore/add-types", "debug": "^4.1.1", "dlv": "^1.1.3", - "err-code": "^2.0.3", - "hamt-sharding": "^1.0.0", + "err-code": "^3.0.1", + "hamt-sharding": "^2.0.0", "hashlru": "^2.3.0", "interface-datastore": "^3.0.3", - "ipfs-bitswap": "^4.0.2", - "ipfs-block-service": "^0.18.0", + "ipfs-bitswap": "^5.0.1", + "ipfs-block-service": "^0.19.0", "ipfs-core-types": "^0.3.1", "ipfs-core-utils": "^0.7.2", - "ipfs-repo": "^8.0.0", - 
"ipfs-unixfs": "^2.0.3", - "ipfs-unixfs-exporter": "^3.0.4", - "ipfs-unixfs-importer": "^5.0.0", + "ipfs-repo": "^9.0.0", + "ipfs-unixfs": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs?fix/declare-interface-types-in-d-ts", + "ipfs-unixfs-exporter": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs-exporter?fix/declare-interface-types-in-d-ts", + "ipfs-unixfs-importer": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs-importer?fix/declare-interface-types-in-d-ts", "ipfs-utils": "^6.0.1", - "ipld": "^0.28.0", + "ipld": "^0.29.0", "ipld-block": "^0.11.0", - "ipld-dag-cbor": "^0.17.0", - "ipld-dag-pb": "^0.20.0", - "ipld-raw": "^6.0.0", - "ipns": "^0.8.0", + "ipld-dag-cbor": "^0.18.0", + "ipld-dag-pb": "^0.22.0", + "ipld-raw": "^7.0.0", + "ipns": "^0.10.0", "is-domain-name": "^1.0.1", - "is-ipfs": "^2.0.0", + "is-ipfs": "^4.0.0", "it-all": "^1.0.4", "it-first": "^1.0.4", "it-last": "^1.0.4", @@ -94,11 +92,11 @@ "libp2p-crypto": "^0.19.0", "libp2p-floodsub": "^0.24.1", "libp2p-gossipsub": "^0.8.0", - "libp2p-kad-dht": "^0.20.1", + "libp2p-kad-dht": "^0.21.0", "libp2p-mdns": "^0.15.0", "libp2p-mplex": "^0.10.0", "libp2p-noise": "^2.0.1", - "libp2p-record": "^0.9.0", + "libp2p-record": "^0.10.0", "libp2p-tcp": "^0.15.1", "libp2p-webrtc-star": "^0.21.0", "libp2p-websockets": "^0.15.1", @@ -118,8 +116,9 @@ "uint8arrays": "^2.1.3" }, "devDependencies": { + "@types/dlv": "^1.1.2", "aegir": "^31.0.0", - "delay": "^4.4.0", + "delay": "^5.0.0", "go-ipfs": "0.8.0", "interface-ipfs-core": "^0.144.2", "ipfsd-ctl": "^7.2.0", diff --git a/packages/ipfs-core/src/components/add-all/index.js b/packages/ipfs-core/src/components/add-all/index.js index 9de540a7d6..5d14f5685d 100644 --- a/packages/ipfs-core/src/components/add-all/index.js +++ b/packages/ipfs-core/src/components/add-all/index.js @@ -1,18 +1,23 @@ 'use strict' -const importer = require('ipfs-unixfs-importer') +const { importer } = require('ipfs-unixfs-importer') const 
normaliseAddInput = require('ipfs-core-utils/src/files/normalise-input/index') const { parseChunkerString } = require('./utils') const { pipe } = require('it-pipe') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) +/** + * @typedef {import('cids')} CID + * @typedef {import('ipfs-unixfs-importer').ImportResult} ImportResult + */ + /** * @typedef {Object} Context - * @property {import('..').Block} block - * @property {import('..').GCLock} gcLock - * @property {import('..').Preload} preload - * @property {import('..').Pin} pin + * @property {import('ipfs-core-types/src/block').API} block + * @property {import('../gc-lock').GCLock} gcLock + * @property {import('../../types').Preload} preload + * @property {import('ipfs-core-types/src/pin').API} pin * @property {import('ipfs-core-types/src/root').ShardingOptions} [options] * * @param {Context} context @@ -21,11 +26,7 @@ module.exports = ({ block, gcLock, preload, pin, options }) => { const isShardingEnabled = options && options.sharding /** - * Import multiple files and data into IPFS. 
- * - * @param {import('ipfs-core-types/src/files').ImportSource} source - * @param {import('ipfs-core-types/src/root').AddAllOptions} [options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/root').API["addAll"]} */ async function * addAll (source, options = {}) { const opts = mergeOptions({ @@ -63,11 +64,16 @@ module.exports = ({ block, gcLock, preload, pin, options }) => { delete opts.trickle + /** @type {Record} */ const totals = {} if (opts.progress) { const prog = opts.progress + /** + * @param {number} bytes + * @param {string} path + */ opts.progress = (bytes, path) => { if (!totals[path]) { totals[path] = 0 @@ -81,6 +87,9 @@ module.exports = ({ block, gcLock, preload, pin, options }) => { const iterator = pipe( normaliseAddInput(source), + /** + * @param {AsyncIterable} source + */ source => importer(source, block, { ...opts, pin: false @@ -107,8 +116,14 @@ module.exports = ({ block, gcLock, preload, pin, options }) => { return withTimeoutOption(addAll) } +/** + * @param {import('ipfs-core-types/src/root').AddAllOptions} opts + */ function transformFile (opts) { - return async function * (source) { + /** + * @param {AsyncGenerator} source + */ + async function * transformFile (source) { for await (const file of source) { let cid = file.cid @@ -131,10 +146,19 @@ function transformFile (opts) { } } } + + return transformFile } +/** + * @param {(cid: CID) => void} preload + * @param {import('ipfs-core-types/src/root').AddAllOptions} opts + */ function preloadFile (preload, opts) { - return async function * (source) { + /** + * @param {AsyncGenerator} source + */ + async function * maybePreloadFile (source) { for await (const file of source) { const isRootFile = !file.path || opts.wrapWithDirectory ? 
file.path === '' @@ -149,14 +173,23 @@ function preloadFile (preload, opts) { yield file } } + + return maybePreloadFile } +/** + * @param {import('ipfs-core-types/src/pin').API} pin + * @param {import('ipfs-core-types/src/root').AddAllOptions} opts + */ function pinFile (pin, opts) { - return async function * (source) { + /** + * @param {AsyncGenerator} source + */ + async function * maybePinFile (source) { for await (const file of source) { // Pin a file if it is the root dir of a recursive add or the single file // of a direct add. - const isRootDir = !file.path.includes('/') + const isRootDir = !file.path?.includes('/') const shouldPin = (opts.pin == null ? true : opts.pin) && isRootDir && !opts.onlyHash if (shouldPin) { @@ -171,4 +204,6 @@ function pinFile (pin, opts) { yield file } } + + return maybePinFile } diff --git a/packages/ipfs-core/src/components/add.js b/packages/ipfs-core/src/components/add.js index 15fe4ca229..16781cad3e 100644 --- a/packages/ipfs-core/src/components/add.js +++ b/packages/ipfs-core/src/components/add.js @@ -3,23 +3,15 @@ const last = require('it-last') /** - * @typedef {Object} Context - * @property {ReturnType} addAll - * - * @param {Context} context + * @param {Object} context + * @param {import('ipfs-core-types/src/root').API["addAll"]} context.addAll */ module.exports = ({ addAll }) => { /** - * Import a file or data into IPFS. 
- * - * @param {import('ipfs-core-types/src/files').ToEntry} entry - * @param {import('ipfs-core-types/src/root').AddAllOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/root').API["add"]} */ async function add (entry, options) { - /** @type {import('ipfs-core-types/src/files').ImportSource} */ - const source = (entry) - const result = await last(addAll(source, options)) + const result = await last(addAll([entry], options)) // Note this should never happen as `addAll` should yield at least one item // but to satisfy type checker we perfom this check and for good measure // throw an error in case it does happen. diff --git a/packages/ipfs-core/src/components/bitswap/index.js b/packages/ipfs-core/src/components/bitswap/index.js index 8209007a75..a320ae3d73 100644 --- a/packages/ipfs-core/src/components/bitswap/index.js +++ b/packages/ipfs-core/src/components/bitswap/index.js @@ -5,6 +5,13 @@ const createWantlistForPeer = require('./wantlist-for-peer') const createUnwant = require('./unwant') const createStat = require('./stat') +/** + * @typedef {import('../../types').NetworkService} NetworkService + * @typedef {import('peer-id')} PeerId + * @typedef {import('cids')} CID + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ + class BitswapAPI { /** * @param {Object} config @@ -18,10 +25,3 @@ class BitswapAPI { } } module.exports = BitswapAPI - -/** - * @typedef {import('..').NetworkService} NetworkService - * @typedef {import('..').PeerId} PeerId - * @typedef {import('..').CID} CID - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/bitswap/stat.js b/packages/ipfs-core/src/components/bitswap/stat.js index 279c77a67a..4d84e9fda1 100644 --- a/packages/ipfs-core/src/components/bitswap/stat.js +++ b/packages/ipfs-core/src/components/bitswap/stat.js @@ -6,37 +6,15 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param 
{Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Show diagnostic information on the bitswap agent. - * Note: `bitswap.stat` and `stats.bitswap` can be used interchangeably. - * - * @example - * ```js - * const stats = await ipfs.bitswap.stat() - * console.log(stats) - * // { - * // provideBufLen: 0, - * // wantlist: [ CID('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM') ], - * // peers: - * // [ 'QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM', - * // 'QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu', - * // 'QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd' ], - * // blocksReceived: 0, - * // dataReceived: 0, - * // blocksSent: 0, - * // dataSent: 0, - * // dupBlksReceived: 0, - * // dupDataReceived: 0 - * // } - * ``` - * @param {import('.').AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/bitswap').API["stat"]} */ async function stat (options) { - const { bitswap } = await network.use(options) + /** @type {import('ipfs-bitswap')} */ + const bitswap = (await network.use(options)).bitswap const snapshot = bitswap.stat().snapshot return { @@ -54,16 +32,3 @@ module.exports = ({ network }) => { return withTimeoutOption(stat) } - -/** - * @typedef {object} BitswapStats - An object that contains information about the bitswap agent - * @property {number} provideBufLen - an integer - * @property {CID[]} wantlist - * @property {CID[]} peers - array of peer IDs - * @property {Big} blocksReceived - * @property {Big} dataReceived - * @property {Big} blocksSent - * @property {Big} dataSent - * @property {Big} dupBlksReceived - * @property {Big} dupDataReceived - */ diff --git a/packages/ipfs-core/src/components/bitswap/unwant.js b/packages/ipfs-core/src/components/bitswap/unwant.js index bcfc308c36..19d38ed5b3 100644 --- a/packages/ipfs-core/src/components/bitswap/unwant.js +++ 
b/packages/ipfs-core/src/components/bitswap/unwant.js @@ -6,28 +6,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Removes one or more CIDs from the wantlist - * - * @example - * ```JavaScript - * let list = await ipfs.bitswap.wantlist() - * console.log(list) - * // [ CID('QmHash') ] - * - * await ipfs.bitswap.unwant(cid) - * - * list = await ipfs.bitswap.wantlist() - * console.log(list) - * // [] - * ``` - * - * @param {CID | CID[]} cids - The CIDs to remove from the wantlist - * @param {AbortOptions} [options] - * @returns {Promise} - A promise that resolves once the request is complete + * @type {import('ipfs-core-types/src/bitswap').API["unwant"]} */ async function unwant (cids, options) { const { bitswap } = await network.use(options) @@ -42,12 +25,8 @@ module.exports = ({ network }) => { throw errCode(err, 'ERR_INVALID_CID') } - return bitswap.unwant(cids, options) + return bitswap.unwant(cids) } return withTimeoutOption(unwant) } - -/** - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js b/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js index 12d9958da0..d22db8031f 100644 --- a/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js +++ b/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js @@ -5,23 +5,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Returns the wantlist for a connected peer - * - * @example - * ```js - * const list = await ipfs.bitswap.wantlistForPeer(peerId) - * 
console.log(list) - * // [ CID('QmHash') ] - * ``` - * - * @param {PeerId | CID | string | Uint8Array} peerId - A peer ID to return the wantlist for\ - * @param {AbortOptions} [options] - * @returns {Promise} - An array of CIDs currently in the wantlist - * + * @type {import('ipfs-core-types/src/bitswap').API["wantlistForPeer"]} */ async function wantlistForPeer (peerId, options = {}) { const { bitswap } = await network.use(options) @@ -32,16 +20,3 @@ module.exports = ({ network }) => { return withTimeoutOption(wantlistForPeer) } - -/** - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - */ - -/** - * @template ExtraOptions - * @callback WantlistForPeer - * @param {PeerId | CID | string | Uint8Array} peerId - * @param {AbortOptions & ExtraOptions} [options] - * @returns {Promise} - */ diff --git a/packages/ipfs-core/src/components/bitswap/wantlist.js b/packages/ipfs-core/src/components/bitswap/wantlist.js index 9f2e0d0298..5acaafdedd 100644 --- a/packages/ipfs-core/src/components/bitswap/wantlist.js +++ b/packages/ipfs-core/src/components/bitswap/wantlist.js @@ -4,33 +4,18 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Returns the wantlist for your node - * - * @example - * ```js - * const list = await ipfs.bitswap.wantlist() - * console.log(list) - * // [ CID('QmHash') ] - * ``` - * - * @param {AbortOptions} [options] - * @returns {Promise} - An array of CIDs currently in the wantlist. 
+ * @type {import('ipfs-core-types/src/bitswap').API["wantlist"]} */ async function wantlist (options = {}) { const { bitswap } = await network.use(options) - const list = bitswap.getWantlist(options) + const list = bitswap.getWantlist() return Array.from(list).map(e => e[1].cid) } return withTimeoutOption(wantlist) } - -/** - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - */ diff --git a/packages/ipfs-core/src/components/block/get.js b/packages/ipfs-core/src/components/block/get.js index 42ea8b22ff..772e15f76f 100644 --- a/packages/ipfs-core/src/components/block/get.js +++ b/packages/ipfs-core/src/components/block/get.js @@ -5,22 +5,12 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').BlockService} config.blockService - * @param {import('.').Preload} config.preload + * @param {import('ipfs-block-service')} config.blockService + * @param {import('../../types').Preload} config.preload */ module.exports = ({ blockService, preload }) => { /** - * Get a raw IPFS block. 
- * - * @param {CID | string | Uint8Array} cid - A CID that corresponds to the desired block - * @param {GetOptions & AbortOptions} [options] - * @returns {Promise} - A Block type object, containing both the data and the hash of the block - * - * @example - * ```js - * const block = await ipfs.block.get(cid) - * console.log(block.data) - * ``` + * @type {import('ipfs-core-types/src/block').API["get"]} */ async function get (cid, options = {}) { // eslint-disable-line require-await cid = cleanCid(cid) @@ -34,12 +24,3 @@ module.exports = ({ blockService, preload }) => { return withTimeoutOption(get) } - -/** - * @typedef {Object} GetOptions - * @property {boolean} [preload=true] - * - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - * @typedef {import('.').IPLDBlock} IPLDBlock - */ diff --git a/packages/ipfs-core/src/components/block/index.js b/packages/ipfs-core/src/components/block/index.js index 252a49d115..fd11eaf27c 100644 --- a/packages/ipfs-core/src/components/block/index.js +++ b/packages/ipfs-core/src/components/block/index.js @@ -5,6 +5,14 @@ const createPut = require('./put') const createRm = require('./rm') const createStat = require('./stat') +/** + * @typedef {import('../../types').Preload} Preload + * @typedef {import('ipfs-block-service')} BlockService + * @typedef {import('../gc-lock').GCLock} GCLock + * @typedef {import('ipfs-core-types/src/pin').API} Pin + * @typedef {import('../pin/pin-manager')} PinManager + */ + class BlockAPI { /** * @param {Object} config @@ -23,14 +31,3 @@ class BlockAPI { } module.exports = BlockAPI - -/** - * @typedef {import('..').Preload} Preload - * @typedef {import('..').BlockService} BlockService - * @typedef {import('..').GCLock} GCLock - * @typedef {import('..').Pin} Pin - * @typedef {import('..').PinManager} PinManager - * @typedef {import('..').AbortOptions} AbortOptions - * @typedef {import('..').CID} CID - * @typedef {import('..').IPLDBlock} IPLDBlock - */ diff --git 
a/packages/ipfs-core/src/components/block/put.js b/packages/ipfs-core/src/components/block/put.js index 4c65a0da55..77002b2cb0 100644 --- a/packages/ipfs-core/src/components/block/put.js +++ b/packages/ipfs-core/src/components/block/put.js @@ -6,54 +6,20 @@ const CID = require('cids') const isIPFS = require('is-ipfs') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('cids').CIDVersion} CIDVersion + */ + /** * @param {Object} config - * @param {import('.').BlockService} config.blockService - * @param {import('.').Pin} config.pin + * @param {import('ipfs-block-service')} config.blockService + * @param {import('ipfs-core-types/src/pin').API} config.pin * @param {import('.').GCLock} config.gcLock - * @param {import('.').Preload} config.preload + * @param {import('../../types').Preload} config.preload */ module.exports = ({ blockService, pin, gcLock, preload }) => { /** - * Stores input as an IPFS block. - * - * **Note:** If you pass a `Block` instance as the block parameter, you - * don't need to pass options, as the block instance will carry the CID - * value as a property. 
- * - * @example - * ```js - * // Defaults - * const encoder = new TextEncoder() - * const decoder = new TextDecoder() - * - * const bytes = encoder.encode('a serialized object') - * const block = await ipfs.block.put(bytes) - * - * console.log(decoder.decode(block.data)) - * // Logs: - * // a serialized object - * console.log(block.cid.toString()) - * // Logs: - * // the CID of the object - * - * // With custom format and hashtype through CID - * const CID = require('cids') - * const another = encoder.encode('another serialized object') - * const cid = new CID(1, 'dag-pb', multihash) - * const block = await ipfs.block.put(another, cid) - * console.log(decoder.decode(block.data)) - * - * // Logs: - * // a serialized object - * console.log(block.cid.toString()) - * // Logs: - * // the CID of the object - * ``` - * - * @param {IPLDBlock|Uint8Array} block - The block or data to store - * @param {PutOptions & AbortOptions} [options] - **Note:** If you pass a `Block` instance as the block parameter, you don't need to pass options, as the block instance will carry the CID value as a property. 
- * @returns {Promise} - A Block type object, containing both the data and the hash of the block + * @type {import('ipfs-core-types/src/block').API["put"]} */ async function put (block, options = {}) { if (Array.isArray(block)) { @@ -115,18 +81,3 @@ module.exports = ({ blockService, pin, gcLock, preload }) => { return withTimeoutOption(put) } - -/** - * @typedef {Object} PutOptions - * @property {CID} [cid] - A CID to store the block under (default: `undefined`) - * @property {string} [format='dag-pb'] - The codec to use to create the CID (default: `'dag-pb'`) - * @property {import('multihashes').HashName} [mhtype='sha2-256'] - The hashing algorithm to use to create the CID (default: `'sha2-256'`) - * @property {number} [mhlen] - * @property {CIDVersion} [version=0] - The version to use to create the CID (default: `0`) - * @property {boolean} [pin=false] - If true, pin added blocks recursively (default: `false`) - * @property {boolean} [preload] - * - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').IPLDBlock} IPLDBlock - * @typedef {0|1} CIDVersion - */ diff --git a/packages/ipfs-core/src/components/block/rm.js b/packages/ipfs-core/src/components/block/rm.js index 4f05a387f5..3ac995c375 100644 --- a/packages/ipfs-core/src/components/block/rm.js +++ b/packages/ipfs-core/src/components/block/rm.js @@ -12,29 +12,13 @@ const BLOCK_RM_CONCURRENCY = 8 /** * @param {Object} config - * @param {import('.').BlockService} config.blockService - * @param {import('.').PinManager} config.pinManager + * @param {import('ipfs-block-service')} config.blockService + * @param {import('../pin/pin-manager')} config.pinManager * @param {import('.').GCLock} config.gcLock */ module.exports = ({ blockService, gcLock, pinManager }) => { /** - /** - * Remove one or more IPFS block(s). - * - * @param {CID[]|CID} cids - CID(s) corresponding to the block(s) to be removed. 
- * @param {RmOptions & AbortOptions} [options] - * @returns {AsyncIterable} - * - * @example - * ```js - * for await (const result of ipfs.block.rm(cid)) { - * if (result.error) { - * console.error(`Failed to remove block ${result.cid} due to ${result.error.message}`) - * } else { - * console.log(`Removed block ${result.cid}`) - * } - * } - * ``` + * @type {import('ipfs-core-types/src/block').API["rm"]} */ async function * rm (cids, options = {}) { if (!Array.isArray(cids)) { @@ -51,6 +35,7 @@ module.exports = ({ blockService, gcLock, pinManager }) => { parallelMap(BLOCK_RM_CONCURRENCY, async cid => { cid = cleanCid(cid) + /** @type {import('ipfs-core-types/src/block').RmResult} */ const result = { cid } try { @@ -58,10 +43,10 @@ module.exports = ({ blockService, gcLock, pinManager }) => { if (pinResult.pinned) { if (CID.isCID(pinResult.reason)) { // eslint-disable-line max-depth - throw errCode(new Error(`pinned via ${pinResult.reason}`)) + throw errCode(new Error(`pinned via ${pinResult.reason}`), 'ERR_BLOCK_PINNED') } - throw errCode(new Error(`pinned: ${pinResult.reason}`)) + throw errCode(new Error(`pinned: ${pinResult.reason}`), 'ERR_BLOCK_PINNED') } // remove has check when https://github.com/ipfs/js-ipfs-block-service/pull/88 is merged @@ -91,24 +76,3 @@ module.exports = ({ blockService, gcLock, pinManager }) => { return withTimeoutOption(rm) } - -/** - * @typedef {Object} RmOptions - * @property {boolean} [force=false] - Ignores nonexistent blocks - * @property {boolean} [quiet=false] - Write minimal output - * - * @typedef {import('.').AbortOptions} AbortOptions - * - * @typedef {RmSucceess|RmFailure} RmResult - * Note: If an error is present for a given object, the block with - * that cid was not removed and the error will contain the reason why, - * for example if the block was pinned. 
- * - * @typedef {Object} RmSucceess - * @property {CID} cid - * @property {void} [error] - * - * @typedef {Object} RmFailure - * @property {CID} cid - * @property {Error} error - */ diff --git a/packages/ipfs-core/src/components/block/stat.js b/packages/ipfs-core/src/components/block/stat.js index 080530fd7b..191479ccb7 100644 --- a/packages/ipfs-core/src/components/block/stat.js +++ b/packages/ipfs-core/src/components/block/stat.js @@ -5,27 +5,13 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').BlockService} config.blockService - * @param {import('.').Preload} config.preload + * @param {import('ipfs-block-service')} config.blockService + * @param {import('../../types').Preload} config.preload */ module.exports = ({ blockService, preload }) => { /** - /** - * Print information of a raw IPFS block. - * - * @param {CID} cid - CID of the block to get a stats for. - * @param {StatOptions & AbortOptions} options - * @returns {Promise} - * @example - * ```js - * const cid = CID.from('QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ') - * const stats = await ipfs.block.stat(cid) - * console.log(stats.cid.toString()) - * // Logs: QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ - * console.log(stat.size) - * // Logs: 3739 - * ``` + * @type {import('ipfs-core-types/src/block').API["stat"]} */ async function stat (cid, options = {}) { cid = cleanCid(cid) @@ -41,16 +27,3 @@ module.exports = ({ blockService, preload }) => { return withTimeoutOption(stat) } - -/** - * @typedef {Object} Stat - * An object containing the block's info - * @property {CID} cid - * @property {number} size - * - * @typedef {Object} StatOptions - * @property {boolean} [preload] - * - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - */ diff --git a/packages/ipfs-core/src/components/bootstrap/add.js b/packages/ipfs-core/src/components/bootstrap/add.js index d7b44421a9..47275d179c 100644 
--- a/packages/ipfs-core/src/components/bootstrap/add.js +++ b/packages/ipfs-core/src/components/bootstrap/add.js @@ -5,37 +5,27 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * Add a peer address to the bootstrap list - * - * @param {Multiaddr} multiaddr - The address of a network peer - * @param {AbortOptions} [options] - * @returns {Promise} - * @example - * ```js - * const validIp4 = '/ip4/104....9z' - * - * const res = await ipfs.bootstrap.add(validIp4) - * console.log(res.Peers) - * // Logs: - * // ['/ip4/104....9z'] - * ``` + * @type {import('ipfs-core-types/src/bootstrap').API["add"]} */ async function add (multiaddr, options = {}) { if (!isValidMultiaddr(multiaddr)) { throw new Error(`${multiaddr} is not a valid Multiaddr`) } + /** @type {import('ipfs-core-types/src/config').Config} */ + // @ts-ignore repo returns type unknown const config = await repo.config.getAll(options) - // @ts-ignore - May not have `Bootstrap` - if (config.Bootstrap.indexOf(multiaddr.toString()) === -1) { - // @ts-ignore - May not have `Bootstrap` - config.Bootstrap.push(multiaddr.toString()) - } + const boostrappers = config.Bootstrap || [] + boostrappers.push(multiaddr.toString()) + + config.Bootstrap = Array.from( + new Set(boostrappers) + ).sort((a, b) => a.localeCompare(b)) await repo.config.replace(config) @@ -46,10 +36,3 @@ module.exports = ({ repo }) => { return withTimeoutOption(add) } - -/** - * @typedef {import('./utils').Peers} Peers - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - * @typedef {import('.').Multiaddr} Multiaddr - */ diff --git a/packages/ipfs-core/src/components/bootstrap/clear.js b/packages/ipfs-core/src/components/bootstrap/clear.js index a755451a81..f4a8e672a9 100644 --- a/packages/ipfs-core/src/components/bootstrap/clear.js 
+++ b/packages/ipfs-core/src/components/bootstrap/clear.js @@ -5,23 +5,15 @@ const Multiaddr = require('multiaddr') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * Remove all peer addresses from the bootstrap list - * - * @param {AbortOptions} options - * @returns {Promise} - * @example - * ```js - * const res = await ipfs.bootstrap.clear() - * console.log(res.Peers) - * // Logs: - * // [address1, address2, ...] - * ``` + * @type {import('ipfs-core-types/src/bootstrap').API["clear"]} */ async function clear (options = {}) { + /** @type {import('ipfs-core-types/src/config').Config} */ + // @ts-ignore repo returns type unknown const config = await repo.config.getAll(options) const removed = config.Bootstrap || [] config.Bootstrap = [] @@ -33,8 +25,3 @@ module.exports = ({ repo }) => { return withTimeoutOption(clear) } - -/** - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('./utils').Peers} Peers - */ diff --git a/packages/ipfs-core/src/components/bootstrap/index.js b/packages/ipfs-core/src/components/bootstrap/index.js index 3cb7c9dafa..72a568d9c6 100644 --- a/packages/ipfs-core/src/components/bootstrap/index.js +++ b/packages/ipfs-core/src/components/bootstrap/index.js @@ -8,7 +8,7 @@ const createRm = require('./rm') class BootstrapAPI { /** * @param {Object} config - * @param {Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ constructor ({ repo }) { this.add = createAdd({ repo }) @@ -19,10 +19,3 @@ class BootstrapAPI { } } module.exports = BootstrapAPI - -/** - * @typedef {import('..').Repo} Repo - * @typedef {import('..').AbortOptions} AbortOptions - * @typedef {import('..').CID} CID - * @typedef {import('..').Multiaddr} Multiaddr - */ diff --git a/packages/ipfs-core/src/components/bootstrap/list.js b/packages/ipfs-core/src/components/bootstrap/list.js index 87785d3df6..2d0368b5f9 100644 --- 
a/packages/ipfs-core/src/components/bootstrap/list.js +++ b/packages/ipfs-core/src/components/bootstrap/list.js @@ -5,21 +5,11 @@ const Multiaddr = require('multiaddr') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * List all peer addresses in the bootstrap list - * - * @param {AbortOptions} [options] - * @returns {Promise} - * @example - * ```js - * const res = await ipfs.bootstrap.list() - * console.log(res.Peers) - * // Logs: - * // [address1, address2, ...] - * ``` + * @type {import('ipfs-core-types/src/bootstrap').API["list"]} */ async function list (options) { /** @type {string[]|null} */ @@ -29,9 +19,3 @@ module.exports = ({ repo }) => { return withTimeoutOption(list) } - -/** - * @typedef {import('./utils').Peers} Peers - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - */ diff --git a/packages/ipfs-core/src/components/bootstrap/reset.js b/packages/ipfs-core/src/components/bootstrap/reset.js index 9fdf3bc743..c1bcf951b5 100644 --- a/packages/ipfs-core/src/components/bootstrap/reset.js +++ b/packages/ipfs-core/src/components/bootstrap/reset.js @@ -6,23 +6,15 @@ const Multiaddr = require('multiaddr') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * List all peer addresses in the bootstrap list - * - * @param {AbortOptions} options - * @returns {Promise} - * @example - * ```js - * const res = await ipfs.bootstrap.list() - * console.log(res.Peers) - * // Logs: - * // [address1, address2, ...] 
- * ``` + * @type {import('ipfs-core-types/src/bootstrap').API["reset"]} */ async function reset (options = {}) { + /** @type {import('ipfs-core-types/src/config').Config} */ + // @ts-ignore repo returns type unknown const config = await repo.config.getAll(options) config.Bootstrap = defaultConfig().Bootstrap @@ -35,8 +27,3 @@ module.exports = ({ repo }) => { return withTimeoutOption(reset) } - -/** - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('./utils').Peers} Peers - */ diff --git a/packages/ipfs-core/src/components/bootstrap/rm.js b/packages/ipfs-core/src/components/bootstrap/rm.js index 1e10b1b87b..a326748a04 100644 --- a/packages/ipfs-core/src/components/bootstrap/rm.js +++ b/packages/ipfs-core/src/components/bootstrap/rm.js @@ -5,28 +5,19 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * Remove a peer address from the bootstrap list - * - * @param {Multiaddr} multiaddr - The address of a network peer - * @param {AbortOptions} options - * @returns {Promise} - * @example - * ```js - * const res = await ipfs.bootstrap.list() - * console.log(res.Peers) - * // Logs: - * // [address1, address2, ...] 
- * ``` + * @type {import('ipfs-core-types/src/bootstrap').API["rm"]} */ async function rm (multiaddr, options = {}) { if (!isValidMultiaddr(multiaddr)) { throw new Error(`${multiaddr} is not a valid Multiaddr`) } + /** @type {import('ipfs-core-types/src/config').Config} */ + // @ts-ignore repo returns type unknown const config = await repo.config.getAll(options) config.Bootstrap = (config.Bootstrap || []).filter(ma => ma.toString() !== multiaddr.toString()) @@ -37,9 +28,3 @@ module.exports = ({ repo }) => { return withTimeoutOption(rm) } - -/** - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').Multiaddr} Multiaddr - * @typedef {import('./utils').Peers} Peers - */ diff --git a/packages/ipfs-core/src/components/bootstrap/utils.js b/packages/ipfs-core/src/components/bootstrap/utils.js index 48dd2c61d7..41762b870f 100644 --- a/packages/ipfs-core/src/components/bootstrap/utils.js +++ b/packages/ipfs-core/src/components/bootstrap/utils.js @@ -4,7 +4,6 @@ const isMultiaddr = require('mafmt').IPFS.matches /** * @param {any} ma - * @returns {boolean} */ exports.isValidMultiaddr = ma => { try { @@ -13,11 +12,3 @@ exports.isValidMultiaddr = ma => { return false } } - -/** - * @typedef {Object} Peers - * An object that contains an array with all the added addresses - * @property {Array} Peers - * - * @typedef {import('..').Multiaddr} Multiaddr - */ diff --git a/packages/ipfs-core/src/components/cat.js b/packages/ipfs-core/src/components/cat.js index 8146ec4119..e161a608d3 100644 --- a/packages/ipfs-core/src/components/cat.js +++ b/packages/ipfs-core/src/components/cat.js @@ -1,23 +1,19 @@ 'use strict' -const exporter = require('ipfs-unixfs-exporter') +const { exporter } = require('ipfs-unixfs-exporter') const { normalizeCidPath } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @typedef {Object} Context - * @property {import('.').IPLD} ipld - * @property {import('.').Preload} preload + * 
@property {import('ipld')} ipld + * @property {import('../types').Preload} preload * * @param {Context} context */ module.exports = function ({ ipld, preload }) { /** - * Returns content of the file addressed by a valid IPFS Path or CID. - * - * @param {import('ipfs-core-types/src/root').IPFSPath} ipfsPath - * @param {import('ipfs-core-types/src/root').CatOptions} [options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/root').API["cat"]} */ async function * cat (ipfsPath, options = {}) { ipfsPath = normalizeCidPath(ipfsPath) @@ -30,7 +26,7 @@ module.exports = function ({ ipld, preload }) { const file = await exporter(ipfsPath, ipld, options) // File may not have unixfs prop if small & imported with rawLeaves true - if (file.unixfs && file.unixfs.type.includes('dir')) { + if (file.type === 'directory') { throw new Error('this dag node is a directory') } diff --git a/packages/ipfs-core/src/components/config.js b/packages/ipfs-core/src/components/config.js index bc598485ae..c20169573e 100644 --- a/packages/ipfs-core/src/components/config.js +++ b/packages/ipfs-core/src/components/config.js @@ -4,9 +4,17 @@ const getDefaultConfig = require('../runtime/config-nodejs.js') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const log = require('debug')('ipfs:core:config') +/** + * @typedef {import('ipfs-core-types/src/config').Config} Config + * + * @typedef {object} Transformer + * @property {string} description + * @property {(config: Config) => Config} transform + */ + /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { return { @@ -21,49 +29,43 @@ module.exports = ({ repo }) => { } /** - * @param {AbortOptions} [options] + * @type {import('ipfs-core-types/src/config').API["getAll"]} */ async function getAll (options = {}) { // eslint-disable-line require-await + // @ts-ignore TODO: move config typedefs into ipfs-repo return 
repo.config.getAll(options) } /** - * - * @param {string} key - * @param {AbortOptions} [options] + * @type {import('ipfs-core-types/src/config').API["get"]} */ async function get (key, options) { // eslint-disable-line require-await if (!key) { return Promise.reject(new Error('key argument is required')) } + // @ts-ignore TODO: move config typedefs into ipfs-repo return repo.config.get(key, options) } /** - * - * @param {string} key - * @param {ToJSON} value - * @param {AbortOptions} [options] + * @type {import('ipfs-core-types/src/config').API["set"]} */ async function set (key, value, options) { // eslint-disable-line require-await return repo.config.set(key, value, options) } /** - * @param {IPFSConfig} value - * @param {AbortOptions} [options] + * @type {import('ipfs-core-types/src/config').API["replace"]} */ async function replace (value, options) { // eslint-disable-line require-await return repo.config.replace(value, options) } /** - * @param {string} profileName - * @param {*} options - * @returns {Promise<{original: IPFSConfig, updated: IPFSConfig}>} + * @type {import('ipfs-core-types/src/config/profiles').API["apply"]} */ - async function applyProfile (profileName, options = {}) { + async function applyProfile (profileName, options = { dryRun: false }) { const { dryRun } = options const profile = profiles[profileName] @@ -86,6 +88,7 @@ module.exports = ({ repo }) => { delete oldCfg.Identity.PrivKey delete newCfg.Identity.PrivKey + // @ts-ignore TODO: move config typedefs into ipfs-repo return { original: oldCfg, updated: newCfg } } catch (err) { log(err) @@ -96,8 +99,7 @@ module.exports = ({ repo }) => { } /** - * @param {any} _options - * @returns {Promise<{name:string, description:string}[]>} + * @type {import('ipfs-core-types/src/config/profiles').API["list"]} */ async function listProfiles (_options) { // eslint-disable-line require-await return Object.keys(profiles).map(name => ({ @@ -106,13 +108,12 @@ async function listProfiles (_options) { // 
eslint-disable-line require-await })) } +/** + * @type {Record} + */ const profiles = { server: { description: 'Recommended for nodes with public IPv4 address (servers, VPSes, etc.), disables host and content discovery and UPnP in local networks.', - /** - * @param {IPFSConfig} config - * @returns {IPFSConfig} - */ transform: (config) => { config.Discovery.MDNS.Enabled = false config.Discovery.webRTCStar.Enabled = false @@ -126,10 +127,6 @@ const profiles = { }, 'local-discovery': { description: 'Sets default values to fields affected by `server` profile, enables discovery and UPnP in local networks.', - /** - * @param {IPFSConfig} config - * @returns {IPFSConfig} - */ transform: (config) => { config.Discovery.MDNS.Enabled = true config.Discovery.webRTCStar.Enabled = true @@ -143,10 +140,6 @@ const profiles = { }, test: { description: 'Reduces external interference, useful for running ipfs in test environments. Note that with these settings node won\'t be able to talk to the rest of the network without manual bootstrap.', - /** - * @param {IPFSConfig} config - * @returns {IPFSConfig} - */ transform: (config) => { const defaultConfig = getDefaultConfig() @@ -167,10 +160,6 @@ const profiles = { }, 'default-networking': { description: 'Restores default network settings. Inverse profile of the `test` profile.', - /** - * @param {IPFSConfig} config - * @returns {IPFSConfig} - */ transform: (config) => { const defaultConfig = getDefaultConfig() @@ -191,10 +180,6 @@ const profiles = { }, lowpower: { description: 'Reduces daemon overhead on the system. May affect node functionality,performance of content discovery and data fetching may be degraded. 
Recommended for low power systems.', - /** - * @param {IPFSConfig} config - * @returns {IPFSConfig} - */ transform: (config) => { const Swarm = config.Swarm || {} const ConnMgr = Swarm.ConnMgr || {} @@ -209,10 +194,6 @@ const profiles = { }, 'default-power': { description: 'Inverse of "lowpower" profile.', - /** - * @param {IPFSConfig} config - * @returns {IPFSConfig} - */ transform: (config) => { const defaultConfig = getDefaultConfig() @@ -225,303 +206,3 @@ const profiles = { } module.exports.profiles = profiles - -/** - * @typedef {Object} Config - * @property {Get} get - * @property {GetAll} getAll - * @property {Set} set - * @property {Replace} replace - * @property {Profiles} profiles - * - * @callback Get - * Returns the currently being used config. If the daemon is off, it returns - * the stored config. - * - * @param {string} key - The key of the value that should be fetched from the - * config file. If no key is passed, then the whole config will be returned. - * @param {AbortOptions} [options] - * @returns {Promise} - An object containing the configuration of the IPFS node - * @example - * const config = await ipfs.config.get('Addresses.Swarm') - * console.log(config) - * - * - * @callback GetAll - * Returns the full config been used. If the daemon is off, it returns the - * stored config. - * - * @param {AbortOptions} [options] - * @returns {Promise} - * @example - * const config = await ipfs.config.getAll() - * console.log(config) - * - * @callback Set - * Adds or replaces a config value. Note that this operation will not spark the - * restart of any service, i.e: if a config.replace changes the multiaddrs of - * the Swarm, Swarm will have to be restarted manually for the changes to take - * an effect. - * - * @param {string} key - The key of the value that should be added or replaced. - * @param {ToJSON} value - The value to be set. 
- * @param {AbortOptions} [options] - * @returns {Promise} - Promise succeeds if config change succeeded, - * otherwise fails with error. - * @example - * // Disable MDNS Discovery - * await ipfs.config.set('Discovery.MDNS.Enabled', false) - * - * @callback Replace - * Adds or replaces a config file. - * - * Note that this operation will not spark the restart of any service, - * i.e: if a config.replace changes the multiaddrs of the Swarm, Swarm will - * have to be restarted manually for the changes to take an effect. - * - * @param {IPFSConfig} value - A new configuration. - * @param {AbortOptions} [options] - * @returns {Promise} - * @example - * const newConfig = { - * Bootstrap: [] - * } - * await ipfs.config.replace(newConfig) - * - * @typedef {Object} Profiles - * @property {ListProfiles} list - * @property {ApplyProfile} apply - * - * @callback ListProfiles - * List available config profiles - * @param {AbortOptions} [options] - * @returns {Promise} - An array with all the available config profiles - * @example - * const profiles = await ipfs.config.profiles.list() - * profiles.forEach(profile => { - * console.info(profile.name, profile.description) - * }) - * - * @typedef {Object} Profile - * @property {string} description - * @property {string} name - * - * - * @callback ApplyProfile - * List available config profiles - * @param {string} name - * @param {ApplyOptions & AbortOptions} [options] - * @returns {Promise<{original: IPFSConfig, updated: IPFSConfig}>} - * - * @typedef {Object} ApplyOptions - * @property {boolean} [dryRun=false] - If true does not apply the profile - * - * - * @typedef {Object} IPFSConfig - * @property {AddressConfig} Addresses - * @property {string} [Profiles] - * @property {string[]} [Bootstrap] - * @property {DiscoveryConfig} Discovery - * @property {DatastoreConfig} [Datastore] - * @property {IdentityConfig} [Identity] - * @property {KeychainConfig} [Keychain] - * @property {PubsubConfig} [Pubsub] - * @property {SwarmConfig} 
[Swarm] - * @property {RoutingConfig} [Routing] - * - * @typedef {Object} AddressConfig - * Contains information about various listener addresses to be used by this node. - * @property {APIAddress} [API='/ip4/127.0.0.1/tcp/5002'] - * @property {DelegateAddress} [Delegates=[]] - * @property {GatewayAddress} [Gateway='/ip4/127.0.0.1/tcp/9090'] - * @property {SwarmAddress} [Swarm=['/ip4/0.0.0.0/tcp/4002', '/ip4/127.0.0.1/tcp/4003/ws']] - * * - * @typedef {string} Multiaddr - * Composable and future-proof network address following [Multiaddr][] - * specification. - * - * [Multiaddr]:https://github.com/multiformats/multiaddr/ - * - * @typedef {Multiaddr|Multiaddr[]} APIAddress - * The IPFS daemon exposes an [HTTP API][] that allows to control the node and - * run the same commands as you can do from the command line. It is defined on - * the [HTTP API][] Spec. - * - * [Multiaddr][] or array of [Multiaddr][] describing the address(es) to serve the - * [HTTP API][] on. - * - * [Multiaddr]:https://github.com/multiformats/multiaddr/ - * [HTTP API]:https://docs.ipfs.io/reference/api/http - * - * @typedef {Multiaddr[]} DelegateAddress - * Delegate peers are used to find peers and retrieve content from the network - * on your behalf. - * - * Array of [Multiaddr][] describing which addresses to use as delegate nodes. - * - * [Multiaddr]:https://github.com/multiformats/multiaddr/ - * - * @typedef {Multiaddr|Multiaddr[]} GatewayAddress - * A gateway is exposed by the IPFS daemon, which allows an easy way to access - * content from IPFS, using an IPFS path. - * - * [Multiaddr][] or array of [Multiaddr][] describing the address(es) to serve - * the gateway on. - * - * [Multiaddr]:https://github.com/multiformats/multiaddr/ - * - * @typedef {Multiaddr[]} SwarmAddress - * Array of [Multiaddr][] describing which addresses to listen on for p2p swarm - * connections. 
- * - * [Multiaddr]:https://github.com/multiformats/multiaddr/ - * - * - * @typedef {Multiaddr[]} BootstrapConfig - * Bootstrap is an array of [Multiaddr][] of trusted nodes to connect to in order - * to initiate a connection to the network. - * - * [Multiaddr]:https://github.com/multiformats/multiaddr/ - * - * @typedef {Object} DatastoreConfig - * Contains information related to the construction and operation of the on-disk - * storage system. - * @property {DatastoreSpec} [Spec] - * - * @typedef {Object} DatastoreSpec - * Spec defines the structure of the IPFS datastore. It is a composable - * structure, where each datastore is represented by a JSON object. Datastores - * can wrap other datastores to provide extra functionality (e.g. metrics, - * logging, or caching). - * - * This can be changed manually, however, if you make any changes that require - * a different on-disk structure, you will need to run the [ipfs-ds-convert][] - * tool to migrate data into the new structures. - * - * [ipfs-ds-convert]:https://github.com/ipfs/ipfs-ds-convert - * - * Default: - * ```json - * { - * "mounts": [ - * { - * "child": { - * "path": "blocks", - * "shardFunc": "/repo/flatfs/shard/v1/next-to-last/2", - * "sync": true, - * "type": "flatfs" - * }, - * "mountpoint": "/blocks", - * "prefix": "flatfs.datastore", - * "type": "measure" - * }, - * { - * "child": { - * "compression": "none", - * "path": "datastore", - * "type": "levelds" - * }, - * "mountpoint": "/", - * "prefix": "leveldb.datastore", - * "type": "measure" - * } - * ], - * "type": "mount" - * } - * ``` - * - * @typedef {Object} DiscoveryConfig - * Contains options for configuring IPFS node discovery mechanisms. - * @property {MDNSDiscovery} MDNS - * @property {WebRTCStarDiscovery} webRTCStar - * - * @typedef {Object} MDNSDiscovery - * Multicast DNS is a discovery protocol that is able to find other peers on the local network. 
- * @property {boolean} [Enabled=true] - A boolean value for whether or not MDNS - * should be active. - * @property {number} [Interval=10] - A number of seconds to wait between - * discovery checks. - * - * @typedef {Object} WebRTCStarDiscovery - * WebRTCStar is a discovery mechanism prvided by a signalling-star that allows - * peer-to-peer communications in the browser. - * @property {boolean} [Enabled=true] - A boolean value for whether or not - * webRTCStar should be active. - * - * @typedef {Object} IdentityConfig - * @property {PeerID} [PeerID] - * @property {PrivateKey} [PrivKey] - * - * @typedef {string} PeerID - * The unique PKI identity label for this configs peer. Set on init and never - * read, its merely here for convenience. IPFS will always generate the peerID - * from its keypair at runtime. - * - * @typedef {string} PrivateKey - * The base64 encoded protobuf describing (and containing) the nodes private key. - * - * @typedef {Object} KeychainConfig - * We can customize the key management and criptographically protected messages - * by changing the Keychain options. Those options are used for generating the - * derived encryption key (DEK). - * - * The DEK object, along with the passPhrase, is the input to a PBKDF2 function. - * - * You can check the [parameter choice for pbkdf2](https://cryptosense.com/parameter-choice-for-pbkdf2/) - * for more information. - * @property {DEK} DEK - * - * @typedef {Object} DEK - * @property {number} keyLength - * @property {number} iterationCount - * @property {string} salt - * @property {string} hash - * - * @typedef {Object} PubsubConfig - * Options for configuring the pubsub subsystem. It is important pointing out - * that this is not supported in the browser. If you want to configure a - * different pubsub router in the browser you must configure - * `libp2p.modules.pubsub` options instead. 
- * - * @property {PubSubRouter} [Router='gossipsub'] - * @property {boolean} [Enabled=true] - * - * @typedef {'gossipsub'|'floodsub'} PubSubRouter - * A string value for specifying which pubsub routing protocol to use. You can - * either use `'gossipsub'` in order to use the [ChainSafe/gossipsub-js] - * (https://github.com/ChainSafe/gossipsub-js) implementation, or `'floodsub'` - * to use the [libp2p/js-libp2p-floodsub](https://github.com/libp2p/js-libp2p-floodsub) - * implementation. - * - * You can read more about these implementations on the [libp2p/specs/pubsub] - * (https://github.com/libp2p/specs/tree/master/pubsub) document. - * - * @typedef {Object} SwarmConfig - * Options for configuring the swarm. - * @property {ConnMgrConfig} [ConnMgr] - * @property {boolean} [DisableNatPortMap] - * - * @typedef {Object} ConnMgrConfig - * The connection manager determines which and how many connections to keep and - * can be configured to keep. - * - * The "basic" connection manager tries to keep between `LowWater` and - * `HighWater` connections. It works by: - * - * 1. Keeping all connections until `HighWater` connections is reached. - * 2. Once `HighWater` is reached, it closes connections until `LowWater` is - * reached. - * - * @property {number} [LowWater=200] - The minimum number of connections to - * maintain. - * @property {number} [HighWater=500] - The number of connections that, when - * exceeded, will trigger a connection GC operation. 
- * - * {{LowWater?:number, HighWater?:number}} ConnMgr - * - * @typedef {Object} RoutingConfig - * @property {string} [Type] - * - * @typedef {import('ipfs-core-types/src/basic').ToJSON} ToJSON - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dag/get.js b/packages/ipfs-core/src/components/dag/get.js index 7fb86bfca8..15b5d01c5f 100644 --- a/packages/ipfs-core/src/components/dag/get.js +++ b/packages/ipfs-core/src/components/dag/get.js @@ -7,57 +7,12 @@ const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') /** * @param {Object} config - * @param {import('..').IPLD} config.ipld - * @param {import('..').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload */ module.exports = ({ ipld, preload }) => { /** - * Retrieve an IPLD format node - * - * @example - * ```js - * // example obj - * const obj = { - * a: 1, - * b: [1, 2, 3], - * c: { - * ca: [5, 6, 7], - * cb: 'foo' - * } - * } - * - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - * console.log(cid.toString()) - * // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5 - * - * async function getAndLog(cid, path) { - * const result = await ipfs.dag.get(cid, { path }) - * console.log(result.value) - * } - * - * await getAndLog(cid, '/a') - * // Logs: - * // 1 - * - * await getAndLog(cid, '/b') - * // Logs: - * // [1, 2, 3] - * - * await getAndLog(cid, '/c') - * // Logs: - * // { - * // ca: [5, 6, 7], - * // cb: 'foo' - * // } - * - * await getAndLog(cid, '/c/ca/1') - * // Logs: - * // 6 - * ``` - * - * @param {CID|string} ipfsPath - A DAG node that follows one of the supported IPLD formats - * @param {GetOptions & AbortOptions} [options] - An optional configration - * @returns {Promise} + * @type {import('ipfs-core-types/src/dag').API["get"]} */ const get = async function get (ipfsPath, options = {}) { const { @@ -77,7 +32,7 @@ module.exports = ({ 
ipld, preload }) => { const entry = options.localResolve ? await first(ipld.resolve(cid, options.path)) : await last(ipld.resolve(cid, options.path)) - /** @type {DagEntry} - first and last will return undefined when empty */ + /** @type {import('ipfs-core-types/src/dag').GetResult} - first and last will return undefined when empty */ const result = (entry) return result } @@ -90,18 +45,3 @@ module.exports = ({ ipld, preload }) => { return withTimeoutOption(get) } - -/** - * @typedef {Object} GetOptions - * @property {boolean} [localResolve=false] - * @property {number} [timeout] - * @property {boolean} [preload=false] - * @property {string} [path] - An optional path within the DAG to resolve - * - * @typedef {Object} DagEntry - * @property {Object} value - * @property {string} remainderPath - * - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dag/index.js b/packages/ipfs-core/src/components/dag/index.js index ccb49b2986..fbdc823b65 100644 --- a/packages/ipfs-core/src/components/dag/index.js +++ b/packages/ipfs-core/src/components/dag/index.js @@ -5,16 +5,17 @@ const createResolve = require('./resolve') const createTree = require('./tree') const createPut = require('./put') -class Reader { - /** - * @param {ReaderConfig} config - */ - constructor (config) { - this.get = createGet(config) - this.resolve = createResolve(config) - this.tree = createTree(config) - } -} +/** + * @typedef {Object} ReaderConfig + * @property {IPLD} ipld + * @property {Preload} preload + * + * @typedef {import('ipld')} IPLD + * @typedef {import('../../types').Preload} Preload + * @typedef {import('ipfs-core-types/src/pin').API} Pin + * @typedef {import('../gc-lock').GCLock} GCLock + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ class DagAPI { /** @@ -23,47 +24,13 @@ class DagAPI { * @param {Preload} config.preload * @param {Pin} config.pin * @param {GCLock} 
config.gcLock - * @param {DagReader} config.dagReader */ - constructor ({ ipld, pin, preload, gcLock, dagReader }) { - const { get, resolve, tree } = dagReader - const put = createPut({ ipld, preload, pin, gcLock }) - - this.get = get - this.resolve = resolve - this.tree = tree - this.put = put - } - - /** - * Creates a reader part of the DAG API. This allows other APIs that require - * reader parts of the DAG API to be instantiated before components required - * by writer end are. - * - * @param {ReaderConfig} config - * @returns {DagReader} - */ - static reader (config) { - return new Reader(config) + constructor ({ ipld, pin, preload, gcLock }) { + this.get = createGet({ ipld, preload }) + this.resolve = createResolve({ ipld, preload }) + this.tree = createTree({ ipld, preload }) + this.put = createPut({ ipld, preload, pin, gcLock }) } } module.exports = DagAPI - -/** - * @typedef {Object} DagReader - * @property {ReturnType} get - * @property {ReturnType} resolve - * @property {ReturnType} tree - * - * @typedef {Object} ReaderConfig - * @property {IPLD} ipld - * @property {Preload} preload - * - * @typedef {import('..').IPLD} IPLD - * @typedef {import('..').Preload} Preload - * @typedef {import('..').Pin} Pin - * @typedef {import('..').GCLock} GCLock - * @typedef {import('..').CID} CID - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dag/put.js b/packages/ipfs-core/src/components/dag/put.js index 687d11ccc8..41491d415b 100644 --- a/packages/ipfs-core/src/components/dag/put.js +++ b/packages/ipfs-core/src/components/dag/put.js @@ -1,36 +1,37 @@ 'use strict' const multicodec = require('multicodec') +const multihashes = require('multihashing-async').multihashes /** - * @param {string} name - * @returns {number} + * @typedef {import('cids')} CID + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('multicodec').CodecCode} CodecCode + * @typedef {import('multicodec').CodecName} CodecName + 
* @typedef {import('multihashes').HashCode} HashCode + * @typedef {import('multihashes').HashName} HashName */ -const nameToCodec = name => multicodec[name.toUpperCase().replace(/-/g, '_')] +/** + * + * @param {CodecName} name + */ +const nameToCodec = name => multicodec.getCodeFromName(name) +/** + * @param {HashName} name + */ +const nameToHashCode = name => multihashes.names[name] const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Pin} config.pin - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('ipfs-core-types/src/pin').API} config.pin + * @param {import('../../types').Preload} config.preload * @param {import('.').GCLock} config.gcLock */ module.exports = ({ ipld, pin, gcLock, preload }) => { /** - * Store an IPLD format node - * - * @param {Object} dagNode - * @param {PutOptions & AbortOptions} [options] - * @returns {Promise} - * @example - * ```js - * const obj = { simple: 'object' } - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha3-512' }) - * - * console.log(cid.toString()) - * // zBwWX9ecx5F4X54WAjmFLErnBT6ByfNxStr5ovowTL7AhaUR98RWvXPS1V3HqV1qs3r5Ec5ocv7eCdbqYQREXNUfYNuKG - * ``` + * @type {import('ipfs-core-types/src/dag').API["put"]} */ async function put (dagNode, options = {}) { const { cidVersion, format, hashAlg } = readEncodingOptions(options) @@ -66,8 +67,7 @@ module.exports = ({ ipld, pin, gcLock, preload }) => { } /** - * - * @param {PutOptions} options + * @param {import('ipfs-core-types/src/dag').PutOptions} options */ const readEncodingOptions = (options) => { if (options.cid && (options.format || options.hashAlg)) { @@ -91,22 +91,22 @@ const readEncodingOptions = (options) => { /** * * @param {Object} options - * @param {number|string} options.format - * @param {number|string} [options.hashAlg] + * @param {CodecCode|CodecName} 
options.format + * @param {HashCode|HashName} [options.hashAlg] */ const encodingCodes = ({ format, hashAlg }) => ({ format: typeof format === 'string' ? nameToCodec(format) : format, - hashAlg: typeof hashAlg === 'string' ? nameToCodec(hashAlg) : hashAlg + hashAlg: typeof hashAlg === 'string' ? nameToHashCode(hashAlg) : hashAlg }) /** * Figures out what version of CID should be used given the options. * * @param {Object} options - * @param {0|1} [options.version] + * @param {CIDVersion} [options.version] * @param {CID} [options.cid] - * @param {number} [options.format] - * @param {number} [options.hashAlg] + * @param {CodecCode} [options.format] + * @param {HashCode} [options.hashAlg] */ const readVersion = ({ version, cid, format, hashAlg }) => { // If version is passed just use that. @@ -124,35 +124,7 @@ const readVersion = ({ version, cid, format, hashAlg }) => { } } -/** @type {WithCIDOptions} */ const defaultCIDOptions = { - format: multicodec.DAG_CBOR, - hashAlg: multicodec.SHA2_256 + format: multicodec.getCodeFromName('dag-cbor'), + hashAlg: multihashes.names['sha2-256'] } - -/** - * @typedef {PutWith & OtherPutOptions} PutOptions - * @typedef {WithCID | WithCIDOptions} PutWith - * - * - * @typedef {Object} WithCID - * @property {CID} [cid] - * // Note: We still stil need to reserve these fields otherwise it implies - * // that those fields can still be there and have very different types. 
- * @property {undefined} [format] - * @property {undefined} [hashAlg] - * @property {undefined} [version] - * - * @typedef {Object} WithCIDOptions - * @property {undefined} [cid] - * @property {string|number} format - * @property {string|number} hashAlg - * @property {0|1} [version] - * - * @typedef {Object} OtherPutOptions - * @property {boolean} [pin=false] - * @property {boolean} [preload=false] - * - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dag/resolve.js b/packages/ipfs-core/src/components/dag/resolve.js index 574b62665d..f8e0c7855b 100644 --- a/packages/ipfs-core/src/components/dag/resolve.js +++ b/packages/ipfs-core/src/components/dag/resolve.js @@ -6,40 +6,12 @@ const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload */ module.exports = ({ ipld, preload }) => { /** - * Returns the CID and remaining path of the node at the end of the passed IPFS path - * - * @param {CID|string} ipfsPath - * @param {ResolveOptions & AbortOptions} options - * @returns {Promise} - * @example - * ```JavaScript - * // example obj - * const obj = { - * a: 1, - * b: [1, 2, 3], - * c: { - * ca: [5, 6, 7], - * cb: 'foo' - * } - * } - * - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - * console.log(cid.toString()) - * // bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq - * - * const result = await ipfs.dag.resolve(`${cid}/c/cb`) - * console.log(result) - * // Logs: - * // { - * // cid: CID(bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq), - * // remainderPath: 'c/cb' - * // } - * ``` + * @type {import('ipfs-core-types/src/dag').API["resolve"]} */ async function resolve (ipfsPath, options = {}) { const { 
@@ -92,16 +64,3 @@ module.exports = ({ ipld, preload }) => { return withTimeoutOption(resolve) } - -/** - * @typedef {Object} ResolveOptions - * @property {string} [path] - If `ipfsPath` is a `CID`, you may pass a path here - * @property {boolean} [preload] - * - * @typedef {Object} ResolveResult - * @property {CID} cid - The last CID encountered during the traversal - * @property {string} remainderPath - The path to the end of the IPFS path - * inside the node referenced by the CID - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dag/tree.js b/packages/ipfs-core/src/components/dag/tree.js index f18c980afa..d65b627532 100644 --- a/packages/ipfs-core/src/components/dag/tree.js +++ b/packages/ipfs-core/src/components/dag/tree.js @@ -5,47 +5,12 @@ const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload */ module.exports = ({ ipld, preload }) => { /** - * Enumerate all the entries in a graph - * - * @param {CID} ipfsPath - A DAG node that follows one of the supported IPLD formats - * @param {TreeOptions & AbortOptions} [options] - * @returns {AsyncIterable} - * @example - * ```js - * // example obj - * const obj = { - * a: 1, - * b: [1, 2, 3], - * c: { - * ca: [5, 6, 7], - * cb: 'foo' - * } - * } - * - * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - * console.log(cid.toString()) - * // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5 - * - * const result = await ipfs.dag.tree('zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5') - * console.log(result) - * // Logs: - * // a - * // b - * // b/0 - * // b/1 - * // b/2 - * // c - * // c/ca - * // c/ca/0 - * // c/ca/1 - * // c/ca/2 - * // c/cb - * ``` + * @type 
{import('ipfs-core-types/src/dag').API["tree"]} */ async function * tree (ipfsPath, options = {}) { // eslint-disable-line require-await const { @@ -66,17 +31,3 @@ module.exports = ({ ipld, preload }) => { return withTimeoutOption(tree) } - -/** - * @typedef {Object} TreeOptions - * @property {string} [path] - If `ipfsPath` is a `CID`, you may pass a path here - * @property {boolean} [preload] - * - * @typedef {Object} TreeResult - * @property {CID} cid - The last CID encountered during the traversal - * @property {string} remainderPath - The path to the end of the IPFS path - * inside the node referenced by the CID - * - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dht.js b/packages/ipfs-core/src/components/dht.js index 73f3b91b4c..c609aaadf1 100644 --- a/packages/ipfs-core/src/components/dht.js +++ b/packages/ipfs-core/src/components/dht.js @@ -9,17 +9,13 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network - * @param {import('.').Repo} config.repo + * @param {import('../types').NetworkService} config.network + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ network, repo }) => { const { get, put, findProvs, findPeer, provide, query } = { /** - * Given a key, query the DHT for its best value. - * - * @param {Uint8Array|string} key - * @param {AbortOptions} [options] - The key associated with the value to find - * @returns {Promise} + * @type {import('ipfs-core-types/src/dht').API["get"]} */ async get (key, options = {}) { const { libp2p } = await use(network, options) @@ -27,16 +23,7 @@ module.exports = ({ network, repo }) => { }, /** - * Write a key/value pair to the DHT. - * - * Given a key of the form /foo/bar and a value of any - * form, this will write that value to the DHT with - * that key. 
- * - * @param {Uint8Array} key - * @param {Uint8Array} value - * @param {AbortOptions} [options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/dht').API["put"]} */ async * put (key, value, options) { const { libp2p } = await use(network, options) @@ -44,27 +31,15 @@ module.exports = ({ network, repo }) => { }, /** - * Find peers in the DHT that can provide a specific value, given a CID. - * - * @param {CID} cid - They key to find providers for. - * @param {FindProvsOptions & AbortOptions} [options] - findProviders options - * @returns {AsyncIterable} - * - * @example - * ```js - * const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9') - * for await (const provider of providers) { - * console.log(provider.id.toString()) - * } - * ``` + * @type {import('ipfs-core-types/src/dht').API["findProvs"]} */ - async * findProvs (cid, options = {}) { + async * findProvs (cid, options = { numProviders: 20 }) { const { libp2p } = await use(network, options) - if (options.numProviders) { - options.maxNumProviders = options.numProviders - } - for await (const peer of libp2p._dht.findProviders(normalizeCID(cid), options)) { + for await (const peer of libp2p._dht.findProviders(normalizeCID(cid), { + maxNumProviders: options.numProviders, + signal: options.signal + })) { yield { id: peer.id.toB58String(), addrs: peer.addrs @@ -73,25 +48,7 @@ module.exports = ({ network, repo }) => { }, /** - * Query the DHT for all multiaddresses associated with a `PeerId`. - * - * @param {PeerId|CID} peerId - The id of the peer to search for. 
- * @param {AbortOptions} [options] - * @returns {Promise<{id: string, addrs: Multiaddr[]}>} - * @example - * ```js - * const info = await ipfs.dht.findPeer('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt') - * - * console.log(info.id) - * // QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt - * - * info.addrs.forEach(addr => console.log(addr.toString())) - * // '/ip4/147.75.94.115/udp/4001/quic' - * // '/ip6/2604:1380:3000:1f00::1/udp/4001/quic' - * // '/dnsaddr/bootstrap.libp2p.io' - * // '/ip6/2604:1380:3000:1f00::1/tcp/4001' - * // '/ip4/147.75.94.115/tcp/4001' - * ``` + * @type {import('ipfs-core-types/src/dht').API["findPeer"]} */ async findPeer (peerId, options) { const { libp2p } = await use(network, options) @@ -108,13 +65,9 @@ module.exports = ({ network, repo }) => { }, /** - * Announce to the network that we are providing given values. - * - * @param {CID|CID[]} cids - The keys that should be announced. - * @param {ProvideOptions & AbortOptions} [options] - provide options - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/dht').API["provide"]} */ - async * provide (cids, options = {}) { + async * provide (cids, options = { recursive: false }) { const { libp2p } = await use(network, options) cids = Array.isArray(cids) ? cids : [cids] @@ -147,15 +100,11 @@ module.exports = ({ network, repo }) => { }, /** - * Find the closest peers to a given `PeerId`, by querying the DHT. - * - * @param {string|PeerId} peerId - The `PeerId` to run the query against. 
- * @param {AbortOptions} [options] - * @returns {AsyncIterable<{ id: CID, addrs: Multiaddr[] }>} + * @type {import('ipfs-core-types/src/dht').API["query"]} */ async * query (peerId, options) { const { libp2p } = await use(network, options) - if (typeof peerId === 'string') { + if (typeof peerId === 'string' || CID.isCID(peerId)) { peerId = PeerId.createFromCID(peerId) } @@ -188,7 +137,7 @@ module.exports = ({ network, repo }) => { const parseCID = cid => { try { const cidStr = cid.toString().split('/') - .filter(part => part && part !== 'ipfs' && part !== 'ipns')[0] + .filter((/** @type {string} */ part) => part && part !== 'ipfs' && part !== 'ipns')[0] return (new CID(cidStr)).bytes } catch (error) { @@ -205,8 +154,8 @@ const normalizeCID = cid => cid instanceof Uint8Array ? cid : parseCID(cid) /** - * @param {import('.').NetworkService} network - * @param {AbortOptions} [options] + * @param {import('../types').NetworkService} network + * @param {import('ipfs-core-types/src/basic').AbortOptions} [options] */ const use = async (network, options) => { const net = await network.use(options) @@ -216,24 +165,3 @@ const use = async (network, options) => { throw new NotEnabledError('dht not enabled') } } -/** - * @typedef {Object} QueryEvent - * @property {PeerId} id - * @property {number} type - * @property {string} extra - * @property {PeerInfo[]} responses - * - * @typedef {Object} ProvideOptions - * @property {boolean} [recursive=false] - Provide not only the given object but also all objects linked from it. 
- * - * @typedef {Object} FindProvsOptions - * @property {number} [numProviders] - maximum number of providers to find - * @property {number} [maxNumProviders] - * - * @typedef {Object} PeerInfo - * @property {PeerId} id - * @property {Multiaddr[]} addrs - * - * @typedef {import('multiaddr')} Multiaddr - * @typedef {import('../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/dns.js b/packages/ipfs-core/src/components/dns.js index 0d206989d5..99c2c58853 100644 --- a/packages/ipfs-core/src/components/dns.js +++ b/packages/ipfs-core/src/components/dns.js @@ -19,13 +19,9 @@ function fqdnFixups (domain) { module.exports = () => { /** - * Resolve DNS links - * - * @param {string} domain - * @param {DNSOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/root').API["dns"]} */ - const resolveDNS = async (domain, options = {}) => { // eslint-disable-line require-await + const resolveDNS = async (domain, options = { recursive: true }) => { // eslint-disable-line require-await if (typeof domain !== 'string') { throw new Error('Invalid arguments, domain must be a string') } @@ -37,12 +33,3 @@ module.exports = () => { return withTimeoutOption(resolveDNS) } - -/** - * @typedef {DNSSettings & AbortOptions} DNSOptions - * - * @typedef {Object} DNSSettings - * @property {boolean} [recursive=true] - Resolve until result is not a domain name - * - * @typedef {import('../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/chmod.js b/packages/ipfs-core/src/components/files/chmod.js index 01735d674a..1c576e3ff2 100644 --- a/packages/ipfs-core/src/components/files/chmod.js +++ b/packages/ipfs-core/src/components/files/chmod.js @@ -4,7 +4,7 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const log = require('debug')('ipfs:mfs:touch') const errCode = require('err-code') -const UnixFS = 
require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const toTrail = require('./utils/to-trail') const addLink = require('./utils/add-link') const updateTree = require('./utils/update-tree') @@ -13,23 +13,45 @@ const { DAGNode } = require('ipld-dag-pb') const mc = require('multicodec') const mh = require('multihashing-async').multihash const { pipe } = require('it-pipe') -const importer = require('ipfs-unixfs-importer') -const exporter = require('ipfs-unixfs-exporter') +const { importer } = require('ipfs-unixfs-importer') +const { recursive } = require('ipfs-unixfs-exporter') const last = require('it-last') const cp = require('./cp') const rm = require('./rm') +// @ts-ignore - TODO: refactor this so it does not require a deep require const persist = require('ipfs-unixfs-importer/src/utils/persist') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} flush + * @property {HashName} hashAlg + * @property {CIDVersion} cidVersion + * @property {number} shardSplitThreshold + * @property {boolean} recursive + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { flush: true, shardSplitThreshold: 1000, hashAlg: 'sha2-256', cidVersion: 0, - recursive: false, - signal: undefined + recursive: false } +/** + * @param {string} mode + * @param {number} originalMode + * @param {boolean} isDirectory + */ function calculateModification (mode, originalMode, isDirectory) { let modification = 0 @@ -48,6 +70,10 @@ function calculateModification (mode, originalMode, isDirectory) { return modification } +/** + * @param {string} references + * @param {number} modification + */ function calculateUGO (references, modification) { let ugo = 0 @@ -66,6 
+92,11 @@ function calculateUGO (references, modification) { return ugo } +/** + * @param {string} references + * @param {string} mode + * @param {number} modification + */ function calculateSpecial (references, mode, modification) { if (mode.includes('t')) { modification += parseInt('1000', 8) @@ -84,7 +115,13 @@ function calculateSpecial (references, mode, modification) { return modification } -// https://en.wikipedia.org/wiki/Chmod#Symbolic_modes +/** + * https://en.wikipedia.org/wiki/Chmod#Symbolic_modes + * + * @param {string} input + * @param {number} originalMode + * @param {boolean} isDirectory + */ function parseSymbolicMode (input, originalMode, isDirectory) { if (!originalMode) { originalMode = 0 @@ -146,34 +183,42 @@ function parseSymbolicMode (input, originalMode, isDirectory) { if (operator === '-') { return modification ^ originalMode } + + return originalMode } +/** + * @param {string | InstanceType | number} mode + * @param {UnixFS} metadata + * @returns {number} + */ function calculateMode (mode, metadata) { - if (mode instanceof String) { - mode = mode.toString() - } + if (mode instanceof String || typeof mode === 'string') { + const strMode = `${mode}` - if (typeof mode === 'string') { - if (mode.match(/^\d+$/g)) { - mode = parseInt(mode, 8) + if (strMode.match(/^\d+$/g)) { + mode = parseInt(strMode, 8) } else { - mode = mode.split(',').reduce((curr, acc) => { + // @ts-ignore freaks out over the curr: number, acc: string thing + mode = 0 + strMode.split(',').reduce((curr, acc) => { return parseSymbolicMode(acc, curr, metadata.isDirectory()) - }, metadata.mode) + }, metadata.mode || 0) } } + // @ts-ignore return mode } +/** + * @param {MfsContext} context + */ module.exports = (context) => { /** - * @param {string} path - * @param {string | number} mode - * @param {ChmodOptions & AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/files').API["chmod"]} */ async function mfsChmod (path, mode, options = {}) { + /** 
@type {DefaultOptions} */ const opts = mergeOptions(defaultOptions, options) log(`Fetching stats for ${path}`) @@ -193,10 +238,14 @@ module.exports = (context) => { // but do not reimport files, only manipulate dag-pb nodes const root = await pipe( async function * () { - for await (const entry of exporter.recursive(cid, context.ipld)) { - let node = await context.ipld.get(entry.cid) + for await (const entry of recursive(cid, context.ipld)) { + if (entry.type !== 'file' && entry.type !== 'directory') { + throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') + } + entry.unixfs.mode = calculateMode(mode, entry.unixfs) - node = new DAGNode(entry.unixfs.marshal(), node.Links) + + const node = new DAGNode(entry.unixfs.marshal(), entry.node.Links) yield { path: entry.path, @@ -204,19 +253,26 @@ module.exports = (context) => { } } }, + // @ts-ignore source is not compatible because we are not importing files (source) => importer(source, context.block, { ...opts, pin: false, dagBuilder: async function * (source, block, opts) { for await (const entry of source) { yield async function () { - const cid = await persist(entry.content.serialize(), block, opts) + /** @type {DAGNode} */ + // @ts-ignore + const node = entry.content + + const buf = node.serialize() + const cid = await persist(buf, block, opts) + const unixfs = UnixFS.unmarshal(node.Data) return { cid, + size: buf.length, path: entry.path, - unixfs: UnixFS.unmarshal(entry.content.Data), - node: entry.content + unixfs } } } @@ -225,6 +281,10 @@ module.exports = (context) => { (nodes) => last(nodes) ) + if (!root) { + throw errCode(new Error(`Could not chmod ${path}`), 'ERR_COULD_NOT_CHMOD') + } + // remove old path from mfs await rm(context)(path, opts) @@ -239,9 +299,12 @@ module.exports = (context) => { metadata.mode = calculateMode(mode, metadata) node = new DAGNode(metadata.marshal(), node.Links) + /** @type {HashName} */ + const hashAlg = opts.hashAlg || defaultOptions.hashAlg + const 
updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: cid.version, - hashAlg: mh.names[opts.hashAlg || defaultOptions.hashAlg], + hashAlg: mh.names[hashAlg], onlyHash: !opts.flush }) @@ -255,8 +318,9 @@ module.exports = (context) => { cid: updatedCid, size: node.serialize().length, flush: opts.flush, - hashAlg: opts.hashAlg, - cidVersion: cid.version + hashAlg: hashAlg, + cidVersion: cid.version, + shardSplitThreshold: Infinity }) parent.cid = result.cid @@ -270,15 +334,3 @@ module.exports = (context) => { return withTimeoutOption(mfsChmod) } - -/** - * @typedef {Object} ChmodOptions - * @property {boolean} [flush=false] - * @property {number} [shardSplitThreshold=1000] - * @property {string} [hashAlg=sha2-256] - * @property {0|1} [cidVersion=0] - * @property {boolean} [recursive=false] - * - * @typedef {import('cids')} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/cp.js b/packages/ipfs-core/src/components/files/cp.js index 8936421a75..f324aa9b4e 100644 --- a/packages/ipfs-core/src/components/files/cp.js +++ b/packages/ipfs-core/src/components/files/cp.js @@ -8,39 +8,63 @@ const updateTree = require('./utils/update-tree') const updateMfsRoot = require('./utils/update-mfs-root') const addLink = require('./utils/add-link') const toMfsPath = require('./utils/to-mfs-path') -const toSourcesAndDestination = require('./utils/to-sources-and-destination') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toTrail = require('./utils/to-trail') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids')} CID + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('ipfs-unixfs').Mtime} Mtime + * @typedef {import('./utils/to-mfs-path').MfsPath} MfsPath + * @typedef 
{import('./utils/to-trail').MfsTrail} MfsTrail + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} parents + * @property {boolean} flush + * @property {HashName} hashAlg + * @property {CIDVersion} cidVersion + * @property {number} shardSplitThreshold + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { parents: false, flush: true, hashAlg: 'sha2-256', cidVersion: 0, - shardSplitThreshold: 1000, - signal: undefined + shardSplitThreshold: 1000 } /** - * @param {any} context + * @param {MfsContext} context */ -module.exports = function derp (context) { +module.exports = (context) => { /** - * @param {[...from:From, options?:CpOptions]} args - * @returns {Promise} + * @type {import('ipfs-core-types/src/files').API["cp"]} */ - async function mfsCp (...args) { - let { - sources, - destination, - options - } = await toSourcesAndDestination(context, args, defaultOptions) - - if (!sources.length) { - throw errCode(new Error('Please supply at least one source'), 'ERR_INVALID_PARAMS') + async function mfsCp (from, to, opts) { + /** @type {DefaultOptions} */ + const options = mergeOptions(defaultOptions, opts) + + if (!Array.isArray(from)) { + from = [from] } - options.parents = options.p || options.parents + const sources = await Promise.all( + from.map(path => toMfsPath(context, path, options)) + ) + let destination = await toMfsPath(context, to, options) + + if (!sources.length || !destination) { + throw errCode(new Error('Please supply at least one source'), 'ERR_INVALID_PARAMS') + } // make sure all sources exist const missing = sources.find(source => !source.exists) @@ -94,6 +118,11 @@ module.exports = function derp (context) { if (sources.length === 1) { const source = sources.pop() + + if (!source) { + throw errCode(new Error('could not find source'), 'ERR_INVALID_PARAMS') + } + const destinationName = destinationIsDirectory ? 
source.name : destination.name log(`Only one source, copying to destination ${destinationIsDirectory ? 'directory' : 'file'} ${destinationName}`) @@ -108,15 +137,29 @@ module.exports = function derp (context) { return withTimeoutOption(mfsCp) } +/** + * @param {*} destination + */ const isDirectory = (destination) => { return destination.unixfs && destination.unixfs.type && destination.unixfs.type.includes('directory') } +/** + * @param {MfsContext} context + * @param {MfsPath} source + * @param {string} destination + * @param {MfsTrail[]} destinationTrail + * @param {DefaultOptions} options + */ const copyToFile = async (context, source, destination, destinationTrail, options) => { let parent = destinationTrail.pop() + if (!parent) { + throw errCode(new Error('destination had no parent'), 'ERR_INVALID_PARAMS') + } + parent = await addSourceToParent(context, source, destination, parent, options) // update the tree with the new containing directory @@ -128,6 +171,13 @@ const copyToFile = async (context, source, destination, destinationTrail, option await updateMfsRoot(context, newRootCid, options) } +/** + * @param {MfsContext} context + * @param {MfsPath[]} sources + * @param {*} destination + * @param {MfsTrail[]} destinationTrail + * @param {DefaultOptions} options + */ const copyToDirectory = async (context, sources, destination, destinationTrail, options) => { // copy all the sources to the destination for (let i = 0; i < sources.length; i++) { @@ -145,6 +195,14 @@ const copyToDirectory = async (context, sources, destination, destinationTrail, await updateMfsRoot(context, newRootCid, options) } +/** + * @param {MfsContext} context + * @param {MfsPath} source + * @param {string} childName + * @param {*} parent + * @param {DefaultOptions} options + * @returns {Promise} + */ const addSourceToParent = async (context, source, childName, parent, options) => { const sourceBlock = await context.repo.blocks.get(source.cid) @@ -158,7 +216,8 @@ const addSourceToParent = 
async (context, source, childName, parent, options) => name: childName, hashAlg: options.hashAlg, cidVersion: options.cidVersion, - flush: options.flush + flush: options.flush, + shardSplitThreshold: options.shardSplitThreshold }) parent.node = node @@ -167,17 +226,3 @@ const addSourceToParent = async (context, source, childName, parent, options) => return parent } - -/** - * @typedef {Object} CpOptions - * @property {boolean} [flush=false] - * @property {number} [shardSplitThreshold=1000] - * @property {string} [hashAlg=sha2-256] - * @property {0|1} [cidVersion=0] - * @property {boolean} [parents=false] - * - * @typedef {import('./utils/types').Tuple} From - * - * @typedef {import('..').CID} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/flush.js b/packages/ipfs-core/src/components/files/flush.js index 5df96ff0e1..0b469ea122 100644 --- a/packages/ipfs-core/src/components/files/flush.js +++ b/packages/ipfs-core/src/components/files/flush.js @@ -4,20 +4,27 @@ const stat = require('./stat') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) -const defaultOptions = { - timeout: undefined, - signal: undefined -} +/** + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ +const defaultOptions = {} +/** + * @param {MfsContext} context + */ module.exports = (context) => { /** - * Flush a given path's data to disk - * - * @param {string} path - * @param {AbortOptions} [options] - * @returns {Promise} The CID of the path that has been flushed + * @type {import('ipfs-core-types/src/files').API["flush"]} */ async function mfsFlush (path, options = {}) { + /** @type {DefaultOptions} */ options = mergeOptions(defaultOptions, options) const { cid } = await 
stat(context)(path, options) @@ -27,8 +34,3 @@ module.exports = (context) => { return withTimeoutOption(mfsFlush) } - -/** - * @typedef {import('cids')} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/index.js b/packages/ipfs-core/src/components/files/index.js index bb1478e7b9..22ec4ff7b3 100644 --- a/packages/ipfs-core/src/components/files/index.js +++ b/packages/ipfs-core/src/components/files/index.js @@ -4,26 +4,26 @@ const createLock = require('./utils/create-lock') const isIpfs = require('is-ipfs') /** - * @typedef {Object} MFS - * @property {ReturnType} stat - * @property {ReturnType} chmod - * @property {ReturnType} cp - * @property {ReturnType} flush - * @property {ReturnType} mkdir - * @property {ReturnType} mv - * @property {ReturnType} rm - * @property {ReturnType} touch - * @property {ReturnType} write - * @property {ReturnType} read - * @property {ReturnType} ls + * @typedef {object} MfsContext + * @property {import('ipld')} ipld + * @property {import('ipfs-repo')} repo + * @property {import('ipfs-core-types/src/block').API} block */ -// These operations are read-locked at the function level and will execute simultaneously +/** + * These operations are read-locked at the function level and will execute simultaneously + * + * @type {Record} + */ const readOperations = { stat: require('./stat') } -// These operations are locked at the function level and will execute in series +/** + * These operations are locked at the function level and will execute in series + * + * @type {Record} + */ const writeOperations = { chmod: require('./chmod'), cp: require('./cp'), @@ -34,13 +34,24 @@ const writeOperations = { touch: require('./touch') } -// These operations are asynchronous and manage their own locking +/** + * These operations are asynchronous and manage their own locking + * + * @type {Record} + */ const unwrappedOperations = { write: require('./write'), read: require('./read'), 
ls: require('./ls') } +/** + * @param {object} arg + * @param {*} arg.options + * @param {*} arg.mfs + * @param {*} arg.operations + * @param {*} arg.lock + */ const wrap = ({ options, mfs, operations, lock }) => { @@ -55,6 +66,9 @@ const defaultOptions = { repo: null } +/** + * @param {*} options + */ function createMfs (options) { const { repoOwner @@ -67,14 +81,21 @@ function createMfs (options) { const lock = createLock(repoOwner) + /** + * @type {import('ipfs-core-types/src/basic').HigherOrderFn} + */ const readLock = (operation) => { return lock.readLock(operation) } + /** + * @type {import('ipfs-core-types/src/basic').HigherOrderFn} + */ const writeLock = (operation) => { return lock.writeLock(operation) } + /** @type {Record} */ const mfs = {} wrap({ @@ -92,14 +113,14 @@ function createMfs (options) { } /** - * @param {Object} context - * @param {import('..').IPLD} context.ipld - * @param {import('..').Block} context.block - * @param {import('..').BlockService} context.blockService - * @param {import('..').Repo} context.repo - * @param {import('..').Preload} context.preload + * @param {object} context + * @param {import('ipld')} context.ipld + * @param {import('ipfs-core-types/src/block').API} context.block + * @param {import('ipfs-block-service')} context.blockService + * @param {import('ipfs-repo')} context.repo + * @param {import('../../types').Preload} context.preload * @param {import('..').Options} context.options - * @returns {MFS} + * @returns {import('ipfs-core-types/src/files').API} */ module.exports = ({ ipld, block, blockService, repo, preload, options: constructorOptions }) => { const methods = createMfs({ @@ -110,11 +131,16 @@ module.exports = ({ ipld, block, blockService, repo, preload, options: construct repoOwner: constructorOptions.repoOwner }) + /** + * @type {import('ipfs-core-types/src/basic').HigherOrderFn} + */ const withPreload = fn => (...args) => { + // @ts-ignore cannot derive type of arg const paths = args.filter(arg => 
isIpfs.ipfsPath(arg) || isIpfs.cid(arg)) if (paths.length) { const options = args[args.length - 1] + // @ts-ignore it's a PreloadOptions, honest if (options && options.preload !== false) { paths.forEach(path => preload(path)) } diff --git a/packages/ipfs-core/src/components/files/ls.js b/packages/ipfs-core/src/components/files/ls.js index 951074e86b..c0f6db2515 100644 --- a/packages/ipfs-core/src/components/files/ls.js +++ b/packages/ipfs-core/src/components/files/ls.js @@ -1,104 +1,56 @@ 'use strict' -const exporter = require('ipfs-unixfs-exporter') +const { exporter } = require('ipfs-unixfs-exporter') const toMfsPath = require('./utils/to-mfs-path') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const map = require('it-map') /** - * @param {*} fsEntry - * @returns {UnixFSEntry} + * @typedef {import('./').MfsContext} MfsContext + * @typedef {import('ipfs-core-types/src/files').MFSEntry} MFSEntry */ -const toOutput = (fsEntry) => { - /** @type FileType */ - let type = 'file' - let size = fsEntry.node.size || fsEntry.node.length - let mode - let mtime - - if (fsEntry.unixfs) { - size = fsEntry.unixfs.fileSize() - type = fsEntry.unixfs.type - - if (fsEntry.unixfs.type === 'hamt-sharded-directory') { - type = 'directory' - } - - mode = fsEntry.unixfs.mode - mtime = fsEntry.unixfs.mtime - } +/** + * @param {import('ipfs-unixfs-exporter').UnixFSEntry} fsEntry + */ +const toOutput = (fsEntry) => { + /** @type {MFSEntry} */ const output = { cid: fsEntry.cid, name: fsEntry.name, - type, - size - } - - if (mtime !== undefined) { - output.mtime = mtime + type: fsEntry.type === 'directory' ? 
'directory' : 'file', + size: fsEntry.size } - if (mode !== undefined) { - output.mode = mode + if (fsEntry.type === 'file' || fsEntry.type === 'directory') { + output.mode = fsEntry.unixfs.mode + output.mtime = fsEntry.unixfs.mtime } return output } +/** + * @param {MfsContext} context + */ module.exports = (context) => { /** - * List directories in the local mutable namespace - * - * @param {string} path - * @param {AbortOptions} [options] - * @returns {AsyncIterable} - * @example - * - * ```js - * for await (const file of ipfs.files.ls('/screenshots')) { - * console.log(file.name) - * } - * // 2018-01-22T18:08:46.775Z.png - * // 2018-01-22T18:08:49.184Z.png - * ``` + * @type {import('ipfs-core-types/src/files').API["ls"]} */ async function * mfsLs (path, options = {}) { const mfsPath = await toMfsPath(context, path, options) - const fsDir = await exporter(mfsPath.mfsPath, context.ipld) + const fsEntry = await exporter(mfsPath.mfsPath, context.ipld) - // single file/node - if (!fsDir.unixfs || !fsDir.unixfs.type.includes('directory')) { - yield toOutput(fsDir) + // directory, perhaps sharded + if (fsEntry.type === 'directory') { + yield * map(fsEntry.content(options), toOutput) return } - // directory, perhaps sharded - for await (const fsEntry of fsDir.content(options)) { - yield toOutput(fsEntry) - } + // single file/node + yield toOutput(fsEntry) } return withTimeoutOption(mfsLs) } - -/** - * @typedef {import('cids')} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - * - * @typedef {object} UnixTimeObj - * @property {number} secs - the number of seconds since (positive) or before - * (negative) the Unix Epoch began - * @property {number} [nsecs] - the number of nanoseconds since the last full - * second. 
- * - * @typedef {'file'|'directory'} FileType - * - * @typedef {object} UnixFSEntry - * @property {CID} cid - * @property {string} name - * @property {number} [mode] - * @property {UnixTimeObj} [mtime] - * @property {number} size - * @property {FileType} type - */ diff --git a/packages/ipfs-core/src/components/files/mkdir.js b/packages/ipfs-core/src/components/files/mkdir.js index 0763dcb8f7..065b088b4c 100644 --- a/packages/ipfs-core/src/components/files/mkdir.js +++ b/packages/ipfs-core/src/components/files/mkdir.js @@ -2,7 +2,7 @@ const errCode = require('err-code') const log = require('debug')('ipfs:mfs:mkdir') -const exporter = require('ipfs-unixfs-exporter') +const { exporter } = require('ipfs-unixfs-exporter') const createNode = require('./utils/create-node') const toPathComponents = require('./utils/to-path-components') const updateMfsRoot = require('./utils/update-mfs-root') @@ -12,30 +12,46 @@ const withMfsRoot = require('./utils/with-mfs-root') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * @typedef {import('ipld-dag-pb').DAGLink} DAGLink + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids')} CID + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('ipfs-core-types/src/basic').ToMTime} Mtime + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} parents + * @property {HashName} hashAlg + * @property {CIDVersion} cidVersion + * @property {number} shardSplitThreshold + * @property {boolean} flush + * @property {number} [mode] + * @property {Mtime} [mtime] + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { parents: false, hashAlg: 'sha2-256', cidVersion: 0, shardSplitThreshold: 1000, - flush: true, 
- mode: null, - mtime: null, - signal: undefined + flush: true } +/** + * @param {MfsContext} context + */ module.exports = (context) => { /** - * Make a directory in your MFS - * - * @param {string} path - * @param {MkdirOptions & AbortOptions} [options] - * @returns {Promise} - * @example - * ```js - * await ipfs.files.mkdir('/my/beautiful/directory') - * ``` + * @type {import('ipfs-core-types/src/files').API["mkdir"]} */ async function mfsMkdir (path, options = {}) { + /** @type {DefaultOptions} */ const opts = mergeOptions(defaultOptions, options) if (!path) { @@ -76,8 +92,10 @@ module.exports = (context) => { try { parent = await exporter(subPath, context.ipld) - log(`${subPath} existed`) - log(`${subPath} had children ${parent.node.Links.map(link => link.Name)}`) + + if (parent.type !== 'file' && parent.type !== 'directory') { + throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') + } if (i === pathComponents.length) { if (opts.parents) { @@ -118,6 +136,14 @@ module.exports = (context) => { return withTimeoutOption(mfsMkdir) } +/** + * @param {MfsContext} context + * @param {string} childName + * @param {{ cid: CID, node: { size: number }}} emptyDir + * @param {{ cid?: CID, node?: DAGNode }} parent + * @param {{ name: string, cid: CID }[]} trail + * @param {DefaultOptions} options + */ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) => { log(`Adding empty dir called ${childName} to ${parent.cid}`) @@ -129,7 +155,8 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) name: childName, hashAlg: options.hashAlg, cidVersion: options.cidVersion, - flush: options.flush + flush: options.flush, + shardSplitThreshold: options.shardSplitThreshold }) trail[trail.length - 1].cid = result.cid @@ -139,20 +166,3 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) cid: emptyDir.cid }) } - -/** - * @typedef {Object} MkdirOptions - * @property {boolean} 
[parents=false] - If true, create intermediate directories - * @property {ToMode} [mode] - An integer that represents the file mode - * @property {ToMTime} [mtime] - A Date object, an object with `{ secs, nsecs }` properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of `process.hrtime() - * @property {boolean} [flush] - If true the changes will be immediately flushed to disk - * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries - * @property {CIDVersion} [cidVersion=0] - The CID version to use for any updated entries - * - * @typedef {import('cids')} CID - * @typedef {import('cids').CIDVersion} CIDVersion - * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions - * @typedef {import('ipfs-core-types/src/files').MTime} Mtime - * @typedef {import('ipfs-core-types/src/files').ToMTime} ToMTime - * @typedef {import('ipfs-core-types/src/files').ToMode} ToMode - */ diff --git a/packages/ipfs-core/src/components/files/mv.js b/packages/ipfs-core/src/components/files/mv.js index e58ad02963..c10273433e 100644 --- a/packages/ipfs-core/src/components/files/mv.js +++ b/packages/ipfs-core/src/components/files/mv.js @@ -1,62 +1,54 @@ 'use strict' -const toSources = require('./utils/to-sources') const cp = require('./cp') const rm = require('./rm') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} parents + * @property {boolean} recursive + * @property {boolean} flush + * @property {CIDVersion} cidVersion + * @property {HashName} hashAlg + * @property {number} 
shardSplitThreshold + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { parents: false, recursive: false, flush: true, cidVersion: 0, hashAlg: 'sha2-256', - shardSplitThreshold: 1000, - signal: undefined + shardSplitThreshold: 1000 } /** - * - * @param {any} context + * @param {MfsContext} context */ module.exports = (context) => { /** - * - * @param {[...from:From, to:string, options?:MvOptions]} args - * @returns {Promise} + * @type {import('ipfs-core-types/src/files').API["mv"]} */ - async function mfsMv (...args) { - const { - sources, - options - } = await toSources(context, args, defaultOptions) - - const cpArgs = sources - .map(source => source.path).concat(options) - - // remove the last source as it'll be the destination - const rmArgs = sources - .slice(0, -1) - .map(source => source.path) - .concat(Object.assign(options, { - recursive: true - })) - - await cp(context).apply(null, cpArgs) - await rm(context).apply(null, rmArgs) + async function mfsMv (from, to, options = {}) { + /** @type {DefaultOptions} */ + const opts = mergeOptions(defaultOptions, options) + + await cp(context)(from, to, opts) + await rm(context)(from, { + ...opts, + recursive: true + }) } return withTimeoutOption(mfsMv) } - -/** - * @typedef {Object} MvOptions - * @property {boolean} [parents=false] - * @property {boolean} [flush=false] - * @property {string} [hashAlg='sha2-256'] - * @property {0|1} [cidVersion] - * - * @typedef {import('./utils/types').Tuple} From - * @typedef {import('cids')} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/read.js b/packages/ipfs-core/src/components/files/read.js index ea697c85b7..eb71aecb09 100644 --- a/packages/ipfs-core/src/components/files/read.js +++ b/packages/ipfs-core/src/components/files/read.js @@ -1,40 +1,37 @@ 'use strict' -const exporter = require('ipfs-unixfs-exporter') 
+const { exporter } = require('ipfs-unixfs-exporter') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {number} offset + * @property {number} length + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { offset: 0, - length: Infinity, - signal: undefined + length: Infinity } /** - * @param {any} context + * @param {MfsContext} context */ module.exports = (context) => { /** - * Read a file - * - * @param {string | CID} path - An MFS path, IPFS Path or CID to read - * @param {ReadOptions & AbortOptions} [options] - * @returns {AsyncIterable} - * @example - * ```js - * const chunks = [] - * - * for await (const chunk of ipfs.files.read('/hello-world')) { - * chunks.push(chunk) - * } - * - * console.log(uint8ArrayConcat(chunks).toString()) - * // Hello, World! 
- * ``` + * @type {import('ipfs-core-types/src/files').API["read"]} */ function mfsRead (path, options = {}) { + /** @type {DefaultOptions} */ options = mergeOptions(defaultOptions, options) return { @@ -42,7 +39,7 @@ module.exports = (context) => { const mfsPath = await toMfsPath(context, path, options) const result = await exporter(mfsPath.mfsPath, context.ipld) - if (result.unixfs.type !== 'file') { + if (result.type !== 'file') { throw errCode(new Error(`${path} was not a file`), 'ERR_NOT_FILE') } @@ -62,12 +59,3 @@ module.exports = (context) => { return withTimeoutOption(mfsRead) } - -/** - * @typedef {Object} ReadOptions - * @property {number} [offset] - An offset to start reading the file from - * @property {number} [length] - An optional max length to read from the file - * - * @typedef {import('cids')} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/rm.js b/packages/ipfs-core/src/components/files/rm.js index d9677f99e8..7d7fe0d395 100644 --- a/packages/ipfs-core/src/components/files/rm.js +++ b/packages/ipfs-core/src/components/files/rm.js @@ -9,23 +9,37 @@ const toMfsPath = require('./utils/to-mfs-path') const toTrail = require('./utils/to-trail') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} recursive + * @property {CIDVersion} cidVersion + * @property {HashName} hashAlg + * @property {boolean} flush + * @property {number} shardSplitThreshold + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { recursive: false, cidVersion: 0, hashAlg: 'sha2-256', flush: true, - signal: undefined + shardSplitThreshold: 1000 } /** - * @param {any} context + * 
@param {MfsContext} context */ module.exports = (context) => { /** - * Remove a file or directory - * - * @param {[...paths: Paths, options?:RmOptions]} args - * @returns {Promise} + * @type {import('ipfs-core-types/src/files').API["rm"]} */ async function mfsRm (...args) { const { @@ -51,6 +65,11 @@ module.exports = (context) => { return withTimeoutOption(mfsRm) } +/** + * @param {MfsContext} context + * @param {string} path + * @param {DefaultOptions} options + */ const removePath = async (context, path, options) => { const mfsPath = await toMfsPath(context, path, options) const trail = await toTrail(context, mfsPath.mfsPath) @@ -73,7 +92,8 @@ const removePath = async (context, path, options) => { name: child.name, hashAlg: options.hashAlg, cidVersion: options.cidVersion, - flush: options.flush + flush: options.flush, + shardSplitThreshold: options.shardSplitThreshold }) parent.cid = cid @@ -84,15 +104,3 @@ const removePath = async (context, path, options) => { // Update the MFS record with the new CID for the root of the tree await updateMfsRoot(context, newRootCid, options) } - -/** - * @typedef {Object} RmOptions - * @property {boolean} [recursive=false] - If true all paths under the specifed path(s) will be removed - * @property {boolean} [flush=false] - If true the changes will be immediately flushed to disk - * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries - * @property {0|1} [cidVersion] - The CID version to use for any updated entries - * - * @typedef {import('..').CID} CID - * @typedef {import('./utils/types').Tuple} Paths - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/stat.js b/packages/ipfs-core/src/components/files/stat.js index e1a3690f8f..152239eaf2 100644 --- a/packages/ipfs-core/src/components/files/stat.js +++ b/packages/ipfs-core/src/components/files/stat.js @@ -2,29 +2,39 @@ const mergeOptions = require('merge-options').bind({ 
ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') -const exporter = require('ipfs-unixfs-exporter') +const { exporter } = require('ipfs-unixfs-exporter') const log = require('debug')('ipfs:mfs:stat') const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} withLocal + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { - withLocal: false, - signal: undefined + withLocal: false } /** - * @param {Object} context - * @param {import('..').IPLD} context.ipld + * @typedef {import('ipfs-core-types/src/files').StatResult} StatResult + */ + +/** + * @param {MfsContext} context */ module.exports = (context) => { /** - * Get file or directory statistics - * - * @param {string} path - The MFS path return statistics from - * @param {StatOptions & AbortOptions} [options] - * @returns {Promise} - An object containing the file/directory status + * @type {import('ipfs-core-types/src/files').API["stat"]} */ async function mfsStat (path, options) { + /** @type {DefaultOptions} */ options = mergeOptions(defaultOptions, options) log(`Fetching stats for ${path}`) @@ -48,21 +58,20 @@ module.exports = (context) => { throw err } - if (!statters[file.cid.codec]) { + if (!statters[file.type]) { throw new Error(`Cannot stat codec ${file.cid.codec}`) } - return statters[file.cid.codec](file) + return statters[file.type](file) } return withTimeoutOption(mfsStat) } -/** @type {Record Stat>} */ +/** @type {Record StatResult>} */ const statters = { /** - * @param {any} file - * @returns {Stat} + * @param {import('ipfs-unixfs-exporter').RawNode} file */ raw: (file) => { return { @@ -77,77 +86,76 @@ const statters = { } }, /** - * @param {any} file - * @returns {Stat} + * @param {import('ipfs-unixfs-exporter').UnixFSFile} 
file */ - 'dag-pb': (file) => { - const blocks = file.node.Links.length - const size = file.node.size - const cumulativeSize = file.node.size - - /** @type {Stat} */ - const output = { + file: (file) => { + /** @type {StatResult} */ + const stat = { cid: file.cid, type: 'file', - size: size, - cumulativeSize: cumulativeSize, - blocks: blocks, + size: file.unixfs.fileSize(), + cumulativeSize: file.node.size, + blocks: file.unixfs.blockSizes.length, local: undefined, sizeLocal: undefined, - withLocality: false + withLocality: false, + mode: file.unixfs.mode } - if (file.unixfs) { - output.size = file.unixfs.fileSize() - - // for go-ipfs compatibility - if (file.unixfs.type === 'hamt-sharded-directory') { - output.type = 'directory' - } else { - output.type = file.unixfs.type - } - - output.mode = file.unixfs.mode - - if (file.unixfs.isDirectory()) { - output.size = 0 - output.cumulativeSize = file.node.size - } + if (file.unixfs.mtime) { + stat.mtime = file.unixfs.mtime + } - if (output.type === 'file') { - output.blocks = file.unixfs.blockSizes.length - } + return stat + }, + /** + * @param {import('ipfs-unixfs-exporter').UnixFSDirectory} file + */ + directory: (file) => { + /** @type {StatResult} */ + const stat = { + cid: file.cid, + type: 'directory', + size: 0, + cumulativeSize: file.node.size, + blocks: file.node.Links.length, + local: undefined, + sizeLocal: undefined, + withLocality: false, + mode: file.unixfs.mode + } - if (file.unixfs.mtime) { - output.mtime = file.unixfs.mtime - } + if (file.unixfs.mtime) { + stat.mtime = file.unixfs.mtime } - return output + return stat }, /** - * @param {any} file - * @returns {Stat} + * @param {import('ipfs-unixfs-exporter').ObjectNode} file */ - 'dag-cbor': (file) => { - // @ts-ignore - This is incompatible with Stat object - // @TODO - https://github.com/ipfs/js-ipfs/issues/3325 + object: (file) => { + /** @type {StatResult} */ return { cid: file.cid, + size: file.node.length, + cumulativeSize: file.node.length, + 
type: 'file', // for go compatibility + blocks: 0, local: undefined, sizeLocal: undefined, withLocality: false } }, /** - * @param {any} file - * @returns {Stat} + * @param {import('ipfs-unixfs-exporter').IdentityNode} file */ identity: (file) => { + /** @type {StatResult} */ return { cid: file.cid, - size: file.node.digest.length, - cumulativeSize: file.node.digest.length, + size: file.node.length, + cumulativeSize: file.node.length, blocks: 0, type: 'file', // for go compatibility local: undefined, @@ -156,32 +164,3 @@ const statters = { } } } - -/** - * @typedef {Object} StatOptions - * @property {boolean} [hash=false] - If true, return only the CID - * @property {boolean} [size=false] - If true, return only the size - * @property {boolean} [withLocal=false] - If true, compute the amount of the DAG that is local and if possible the total size - * - * @typedef {Object} Stat - * @property {CID} cid - Content idenntifier - * @property {number} size - An integer with the file size in bytes. - * @property {number} cumulativeSize - An integer with the size of the - * DAGNodes making up the file in bytes. - * @property {'directory'|'file'} type - Type of the file which is either directory - * or file. - * @property {number} blocks - If type is directory, this is the number of files - * in the directory. If it is file it is the number of blocks that make up the - * file. - * @property {boolean} [withLocality] - A boolean to indicate if locality - * information is present. - * @property {boolean} [local] - Is a boolean to indicate if the queried dag is - * fully present locally. - * @property {number} [sizeLocal] - An integer indicating the cumulative size of - * the data present locally. 
- * @property {number} [mode] - File mode - * @property {import('ipfs-core-types/src/files').MTime} [mtime] - Modification time - * - * @typedef {import('cids')} CID - * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/files/touch.js b/packages/ipfs-core/src/components/files/touch.js index 6c6dbc3d7d..3a27202758 100644 --- a/packages/ipfs-core/src/components/files/touch.js +++ b/packages/ipfs-core/src/components/files/touch.js @@ -4,7 +4,7 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const log = require('debug')('ipfs:mfs:touch') const errCode = require('err-code') -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const toTrail = require('./utils/to-trail') const addLink = require('./utils/add-link') const updateTree = require('./utils/update-tree') @@ -14,35 +14,40 @@ const mc = require('multicodec') const mh = require('multihashing-async').multihash const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('ipfs-core-types/src/basic').ToMTime} Mtime + * @typedef {import('./').MfsContext} MfsContext + * @typedef {object} DefaultOptions + * @property {boolean} flush + * @property {number} shardSplitThreshold + * @property {CIDVersion} cidVersion + * @property {HashName} hashAlg + * @property {Mtime} [mtime] + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { - /** @type {ToMTime|undefined} */ - mtime: undefined, flush: true, shardSplitThreshold: 1000, cidVersion: 0, - hashAlg: 'sha2-256', - signal: undefined + hashAlg: 'sha2-256' } +/** + * @param {MfsContext} context + */ module.exports = (context) => { /** - * Update the mtime of a file or 
directory - * - * @param {string} path - The MFS path to update the mtime for - * @param {TouchOptions & AbortOptions} [options] - * @returns {Promise} - * - * @example - * ```js - * // set the mtime to the current time - * await ipfs.files.touch('/path/to/file.txt') - * // set the mtime to a specific time - * await ipfs.files.touch('/path/to/file.txt', { - * mtime: new Date('May 23, 2014 14:45:14 -0700') - * }) - * ``` + * @type {import('ipfs-core-types/src/files').API["touch"]} */ async function mfsTouch (path, options = {}) { + /** @type {DefaultOptions} */ const settings = mergeOptions(defaultOptions, options) settings.mtime = settings.mtime || new Date() @@ -63,6 +68,7 @@ module.exports = (context) => { if (!exists) { const metadata = new UnixFS({ type: 'file', + // @ts-ignore TODO: restore hrtime support to ipfs-unixfs constructor - it's in the code, just not the signature mtime: settings.mtime }) node = new DAGNode(metadata.marshal()) @@ -81,6 +87,8 @@ module.exports = (context) => { node = await context.ipld.get(cid) const metadata = UnixFS.unmarshal(node.Data) + + // @ts-ignore TODO: restore setting all date types as mtime - it's in the code, just not the signature metadata.mtime = settings.mtime node = new DAGNode(metadata.marshal(), node.Links) @@ -118,15 +126,3 @@ module.exports = (context) => { return withTimeoutOption(mfsTouch) } - -/** - * @typedef {Object} TouchOptions - * @property {ToMTime} [mtime] - A Date object, an object with `{ secs, nsecs }` properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of `process.hrtime()` - * @property {boolean} [flush=false] - If true the changes will be immediately flushed to disk - * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries - * @property {import('cids').CIDVersion} [cidVersion] - The CID version to use for any updated entries - * 
- * @typedef {import('cids')} CID - * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions - * @typedef {import('ipfs-core-types/src/files').ToMTime} ToMTime - */ diff --git a/packages/ipfs-core/src/components/files/utils/add-link.js b/packages/ipfs-core/src/components/files/utils/add-link.js index b622f64d33..a316c354e3 100644 --- a/packages/ipfs-core/src/components/files/utils/add-link.js +++ b/packages/ipfs-core/src/components/files/utils/add-link.js @@ -6,11 +6,15 @@ const { } = require('ipld-dag-pb') const CID = require('cids') const log = require('debug')('ipfs:mfs:core:utils:add-link') -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') +// @ts-ignore - refactor this to not need deep require const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded') +// @ts-ignore - refactor this to not need deep require +const defaultImporterOptions = require('ipfs-unixfs-importer/src/options') const { updateHamtDirectory, recreateHamtLevel, + recreateInitialHamtLevel, createShard, toPrefix, addLinksToHamtBucket @@ -20,19 +24,41 @@ const mc = require('multicodec') const mh = require('multihashing-async').multihash const last = require('it-last') +/** + * @typedef {import('ipfs-unixfs').Mtime} Mtime + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('hamt-sharding').Bucket} Bucket + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @param {MfsContext} context + * @param {object} options + * @param {CID} options.cid + * @param {string} options.name + * @param {number} options.size + * @param {number} options.shardSplitThreshold + * @param {HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + * @param {boolean} options.flush + * @param {CID} [options.parentCid] + * @param {DAGNode} [options.parent] + */ const addLink = async (context, options) => { - if (!options.parentCid && !options.parent) { - throw 
errCode(new Error('No parent node or CID passed to addLink'), 'EINVALIDPARENT') - } + let parent = options.parent - if (options.parentCid && !CID.isCID(options.parentCid)) { - throw errCode(new Error('Invalid CID passed to addLink'), 'EINVALIDPARENTCID') - } + if (options.parentCid) { + if (!CID.isCID(options.parentCid)) { + throw errCode(new Error('Invalid CID passed to addLink'), 'EINVALIDPARENTCID') + } - if (!options.parent) { log(`Loading parent node ${options.parentCid}`) + parent = await context.ipld.get(options.parentCid) + } - options.parent = await context.ipld.get(options.parentCid) + if (!parent) { + throw errCode(new Error('No parent node or CID passed to addLink'), 'EINVALIDPARENT') } if (!options.cid) { @@ -51,19 +77,23 @@ const addLink = async (context, options) => { throw errCode(new Error('No child size passed to addLink'), 'EINVALIDCHILDSIZE') } - const meta = UnixFS.unmarshal(options.parent.Data) + const meta = UnixFS.unmarshal(parent.Data) if (meta.type === 'hamt-sharded-directory') { log('Adding link to sharded directory') - return addToShardedDirectory(context, options) + return addToShardedDirectory(context, { + ...options, + parent + }) } - if (options.parent.Links.length >= options.shardSplitThreshold) { + if (parent.Links.length >= options.shardSplitThreshold) { log('Converting directory to sharded directory') return convertToShardedDirectory(context, { ...options, + parent, mtime: meta.mtime, mode: meta.mode }) @@ -71,9 +101,25 @@ const addLink = async (context, options) => { log(`Adding ${options.name} (${options.cid}) to regular directory`) - return addToDirectory(context, options) + return addToDirectory(context, { + ...options, + parent + }) } +/** + * @param {MfsContext} context + * @param {object} options + * @param {CID} options.cid + * @param {string} options.name + * @param {number} options.size + * @param {DAGNode} options.parent + * @param {HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + * @param 
{boolean} options.flush + * @param {Mtime} [options.mtime] + * @param {number} [options.mode] + */ const convertToShardedDirectory = async (context, options) => { const result = await createShard(context, options.parent.Links.map(link => ({ name: link.Name, @@ -90,6 +136,19 @@ const convertToShardedDirectory = async (context, options) => { return result } +/** + * @param {MfsContext} context + * @param {object} options + * @param {CID} options.cid + * @param {string} options.name + * @param {number} options.size + * @param {DAGNode} options.parent + * @param {HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + * @param {boolean} options.flush + * @param {Mtime} [options.mtime] + * @param {number} [options.mode] + */ const addToDirectory = async (context, options) => { options.parent.rmLink(options.name) options.parent.addLink(new DAGLink(options.name, options.size, options.cid)) @@ -98,7 +157,13 @@ const addToDirectory = async (context, options) => { if (node.mtime) { // Update mtime if previously set - node.mtime = new Date() + const ms = Date.now() + const secs = Math.floor(ms / 1000) + + node.mtime = { + secs: secs, + nsecs: (ms - (secs * 1000)) * 1000 + } options.parent = new DAGNode(node.marshal(), options.parent.Links) } @@ -119,21 +184,38 @@ const addToDirectory = async (context, options) => { } } +/** + * @param {MfsContext} context + * @param {object} options + * @param {CID} options.cid + * @param {string} options.name + * @param {number} options.size + * @param {DAGNode} options.parent + * @param {HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + * @param {boolean} options.flush + */ const addToShardedDirectory = async (context, options) => { const { shard, path } = await addFileToShardedDirectory(context, options) - const result = await last(shard.flush('', context.block)) + const result = await last(shard.flush(context.block)) + /** @type {DAGNode} */ const node = await context.ipld.get(result.cid) // we have 
written out the shard, but only one sub-shard will have been written so replace it in the original shard const oldLink = options.parent.Links .find(link => link.Name.substring(0, 2) === path[0].prefix) + /** @type {DAGLink | undefined} */ const newLink = node.Links .find(link => link.Name.substring(0, 2) === path[0].prefix) + if (!newLink) { + throw new Error(`No link found with prefix ${path[0].prefix}`) + } + if (oldLink) { options.parent.rmLink(oldLink.Name) } @@ -143,6 +225,16 @@ const addToShardedDirectory = async (context, options) => { return updateHamtDirectory(context, options.parent.Links, path[0].bucket, options) } +/** + * @param {MfsContext} context + * @param {object} options + * @param {CID} options.cid + * @param {string} options.name + * @param {number} options.size + * @param {DAGNode} options.parent + * @param {HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + */ const addFileToShardedDirectory = async (context, options) => { const file = { name: options.name, @@ -151,8 +243,9 @@ const addFileToShardedDirectory = async (context, options) => { } // start at the root bucket and descend, loading nodes as we go - const rootBucket = await recreateHamtLevel(options.parent.Links) + const rootBucket = await recreateInitialHamtLevel(options.parent.Links) const node = UnixFS.unmarshal(options.parent.Data) + const importerOptions = defaultImporterOptions() const shard = new DirSharded({ root: true, @@ -163,7 +256,12 @@ const addFileToShardedDirectory = async (context, options) => { dirty: true, flat: false, mode: node.mode - }, options) + }, { + hamtHashFn: importerOptions.hamtHashFn, + hamtHashCode: importerOptions.hamtHashCode, + hamtBucketBits: importerOptions.hamtBucketBits, + ...options + }) shard._bucket = rootBucket if (node.mtime) { @@ -182,6 +280,10 @@ const addFileToShardedDirectory = async (context, options) => { index++ const node = segment.node + if (!node) { + throw new Error('Segment had no node') + } + const link = 
node.Links .find(link => link.Name.substring(0, 2) === segment.prefix) @@ -248,16 +350,18 @@ const addFileToShardedDirectory = async (context, options) => { } } +/** + * @param {{ pos: number, bucket: Bucket }} position + * @returns {{ bucket: Bucket, prefix: string, node?: DAGNode }[]} + */ const toBucketPath = (position) => { - let bucket = position.bucket - let positionInBucket = position.pos const path = [{ - bucket, - prefix: toPrefix(positionInBucket) + bucket: position.bucket, + prefix: toPrefix(position.pos) }] - bucket = position.bucket._parent - positionInBucket = position.bucket._posAtParent + let bucket = position.bucket._parent + let positionInBucket = position.bucket._posAtParent while (bucket) { path.push({ diff --git a/packages/ipfs-core/src/components/files/utils/create-lock.js b/packages/ipfs-core/src/components/files/utils/create-lock.js index fa3c6192dd..102a5e8204 100644 --- a/packages/ipfs-core/src/components/files/utils/create-lock.js +++ b/packages/ipfs-core/src/components/files/utils/create-lock.js @@ -1,10 +1,21 @@ 'use strict' +// @ts-ignore const mortice = require('mortice') +/** + * @typedef {object} Lock + * @property {import('ipfs-core-types/src/basic').HigherOrderFn} readLock + * @property {import('ipfs-core-types/src/basic').HigherOrderFn} writeLock + */ + +/** @type {Lock} */ let lock -module.exports = (repoOwner) => { +/** + * @param {boolean} [repoOwner] + */ +module.exports = (repoOwner = false) => { if (lock) { return lock } diff --git a/packages/ipfs-core/src/components/files/utils/create-node.js b/packages/ipfs-core/src/components/files/utils/create-node.js index dede8f7fc6..287e752931 100644 --- a/packages/ipfs-core/src/components/files/utils/create-node.js +++ b/packages/ipfs-core/src/components/files/utils/create-node.js @@ -1,17 +1,34 @@ 'use strict' -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const { DAGNode } = require('ipld-dag-pb') const mc = require('multicodec') const mh = 
require('multihashing-async').multihash +/** + * @typedef {import('ipfs-core-types/src/basic').ToMTime} Mtime + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @param {MfsContext} context + * @param {'file' | 'directory'} type + * @param {object} options + * @param {import('multihashes').HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + * @param {boolean} options.flush + * @param {Mtime} [options.mtime] + * @param {number} [options.mode] + */ const createNode = async (context, type, options) => { const hashAlg = mh.names[options.hashAlg] const metadata = new UnixFS({ type, mode: options.mode, + // @ts-ignore TODO: restore hrtime support to ipfs-unixfs constructor - it's in the code, just not the signature mtime: options.mtime }) diff --git a/packages/ipfs-core/src/components/files/utils/hamt-utils.js b/packages/ipfs-core/src/components/files/utils/hamt-utils.js index 17466eff73..7cc65587dc 100644 --- a/packages/ipfs-core/src/components/files/utils/hamt-utils.js +++ b/packages/ipfs-core/src/components/files/utils/hamt-utils.js @@ -3,15 +3,42 @@ const { DAGNode } = require('ipld-dag-pb') -const Bucket = require('hamt-sharding/src/bucket') +const { + Bucket, + createHAMT +} = require('hamt-sharding') +// @ts-ignore - refactor this to not need deep require const DirSharded = require('ipfs-unixfs-importer/src/dir-sharded') +// @ts-ignore - refactor this to not need deep require +const defaultImporterOptions = require('ipfs-unixfs-importer/src/options') const log = require('debug')('ipfs:mfs:core:utils:hamt-utils') -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const mc = require('multicodec') const mh = require('multihashing-async').multihash const last = require('it-last') +/** + * @typedef {import('ipld-dag-pb').DAGLink} DAGLink + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('ipfs-unixfs').Mtime} Mtime + * @typedef 
{import('multihashes').HashName} HashName + * @typedef {import('cids')} CID + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @param {MfsContext} context + * @param {DAGLink[]} links + * @param {Bucket} bucket + * @param {object} options + * @param {DAGNode} options.parent + * @param {CIDVersion} options.cidVersion + * @param {boolean} options.flush + * @param {HashName} options.hashAlg + */ const updateHamtDirectory = async (context, links, bucket, options) => { + const importerOptions = defaultImporterOptions() + // update parent with new bit field const data = Uint8Array.from(bucket._children.bitField().reverse()) const node = UnixFS.unmarshal(options.parent.Data) @@ -19,7 +46,7 @@ const updateHamtDirectory = async (context, links, bucket, options) => { type: 'hamt-sharded-directory', data, fanout: bucket.tableSize(), - hashType: DirSharded.hashFn.code, + hashType: importerOptions.hamtHashCode, mode: node.mode, mtime: node.mtime }) @@ -39,22 +66,45 @@ const updateHamtDirectory = async (context, links, bucket, options) => { } } +/** + * @param {DAGLink[]} links + * @param {Bucket} rootBucket + * @param {Bucket} parentBucket + * @param {number} positionAtParent + */ const recreateHamtLevel = async (links, rootBucket, parentBucket, positionAtParent) => { // recreate this level of the HAMT const bucket = new Bucket({ - hashFn: DirSharded.hashFn, - hash: parentBucket ? 
parentBucket._options.hash : undefined + hash: rootBucket._options.hash, + bits: rootBucket._options.bits }, parentBucket, positionAtParent) - - if (parentBucket) { - parentBucket._putObjectAt(positionAtParent, bucket) - } + parentBucket._putObjectAt(positionAtParent, bucket) await addLinksToHamtBucket(links, bucket, rootBucket) return bucket } +/** + * @param {DAGLink[]} links + */ +const recreateInitialHamtLevel = async (links) => { + const importerOptions = defaultImporterOptions() + const bucket = createHAMT({ + hashFn: importerOptions.hamtHashFn, + bits: importerOptions.hamtBucketBits + }) + + await addLinksToHamtBucket(links, bucket, bucket) + + return bucket +} + +/** + * @param {DAGLink[]} links + * @param {Bucket} bucket + * @param {Bucket} rootBucket + */ const addLinksToHamtBucket = async (links, bucket, rootBucket) => { await Promise.all( links.map(link => { @@ -62,13 +112,14 @@ const addLinksToHamtBucket = async (links, bucket, rootBucket) => { const pos = parseInt(link.Name, 16) bucket._putObjectAt(pos, new Bucket({ - hashFn: DirSharded.hashFn + hash: rootBucket._options.hash, + bits: rootBucket._options.bits }, bucket, pos)) return Promise.resolve() } - return (rootBucket || bucket).put(link.Name.substring(2), { + return rootBucket.put(link.Name.substring(2), { size: link.Tsize, cid: link.Hash }) @@ -76,20 +127,29 @@ const addLinksToHamtBucket = async (links, bucket, rootBucket) => { ) } +/** + * @param {number} position + */ const toPrefix = (position) => { return position - .toString('16') + .toString(16) .toUpperCase() .padStart(2, '0') .substring(0, 2) } +/** + * @param {MfsContext} context + * @param {string} fileName + * @param {DAGNode} rootNode + */ const generatePath = async (context, fileName, rootNode) => { // start at the root bucket and descend, loading nodes as we go - const rootBucket = await recreateHamtLevel(rootNode.Links, null, null, null) + const rootBucket = await recreateInitialHamtLevel(rootNode.Links) const position = await 
rootBucket._findNewBucketAndPos(fileName) // the path to the root bucket + /** @type {{ bucket: Bucket, prefix: string, node?: DAGNode }[]} */ const path = [{ bucket: position.bucket, prefix: toPrefix(position.pos) @@ -102,6 +162,7 @@ const generatePath = async (context, fileName, rootNode) => { prefix: toPrefix(currentBucket._posAtParent) }) + // @ts-ignore currentBucket = currentBucket._parent } @@ -112,6 +173,10 @@ const generatePath = async (context, fileName, rootNode) => { for (let i = 0; i < path.length; i++) { const segment = path[i] + if (!segment.node) { + throw new Error('Could not generate HAMT path') + } + // find prefix in links const link = segment.node.Links .filter(link => link.Name.substring(0, 2) === segment.prefix) @@ -172,7 +237,16 @@ const generatePath = async (context, fileName, rootNode) => { } } -const createShard = async (context, contents, options) => { +/** + * @param {MfsContext} context + * @param {{ name: string, size: number, cid: CID }[]} contents + * @param {object} [options] + * @param {Mtime} [options.mtime] + * @param {number} [options.mode] + */ +const createShard = async (context, contents, options = {}) => { + const importerOptions = defaultImporterOptions() + const shard = new DirSharded({ root: true, dir: true, @@ -184,6 +258,9 @@ const createShard = async (context, contents, options) => { mtime: options.mtime, mode: options.mode }, { + hamtHashFn: importerOptions.hamtHashFn, + hamtHashCode: importerOptions.hamtHashCode, + hamtBucketBits: importerOptions.hamtBucketBits, ...options, codec: 'dag-pb' }) @@ -195,13 +272,14 @@ const createShard = async (context, contents, options) => { }) } - return last(shard.flush('', context.block, null)) + return last(shard.flush(context.block)) } module.exports = { generatePath, updateHamtDirectory, recreateHamtLevel, + recreateInitialHamtLevel, addLinksToHamtBucket, toPrefix, createShard diff --git a/packages/ipfs-core/src/components/files/utils/remove-link.js 
b/packages/ipfs-core/src/components/files/utils/remove-link.js index 572521bc93..2d260b6a71 100644 --- a/packages/ipfs-core/src/components/files/utils/remove-link.js +++ b/packages/ipfs-core/src/components/files/utils/remove-link.js @@ -1,12 +1,11 @@ 'use strict' const { - DAGNode, DAGLink } = require('ipld-dag-pb') const CID = require('cids') const log = require('debug')('ipfs:mfs:core:utils:remove-link') -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const { generatePath, updateHamtDirectory @@ -15,38 +14,78 @@ const errCode = require('err-code') const mc = require('multicodec') const mh = require('multihashing-async').multihash +/** + * @typedef {import('../').MfsContext} MfsContext + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('hamt-sharding').Bucket} Bucket + * @typedef {import('ipld-dag-pb').DAGNode} DAGNode + * + * @typedef {object} RemoveLinkOptions + * @property {string} name + * @property {number} shardSplitThreshold + * @property {HashName} hashAlg + * @property {CIDVersion} cidVersion + * @property {boolean} flush + * @property {CID} [parentCid] + * @property {DAGNode} [parent] + * + * @typedef {object} RemoveLinkOptionsInternal + * @property {string} name + * @property {number} shardSplitThreshold + * @property {HashName} hashAlg + * @property {CIDVersion} cidVersion + * @property {boolean} flush + * @property {DAGNode} parent + */ + +/** + * @param {MfsContext} context + * @param {RemoveLinkOptions} options + */ const removeLink = async (context, options) => { - if (!options.parentCid && !options.parent) { - throw errCode(new Error('No parent node or CID passed to removeLink'), 'EINVALIDPARENT') - } + const parent = options.parent - if (options.parentCid && !CID.isCID(options.parentCid)) { - throw errCode(new Error('Invalid CID passed to removeLink'), 'EINVALIDPARENTCID') - } + if (options.parentCid) { + if 
(!CID.isCID(options.parentCid)) { + throw errCode(new Error('Invalid CID passed to removeLink'), 'EINVALIDPARENTCID') + } - if (!options.parent) { log(`Loading parent node ${options.parentCid}`) - options.parent = await context.ipld.get(options.parentCid) } + if (!parent) { + throw errCode(new Error('No parent node or CID passed to removeLink'), 'EINVALIDPARENT') + } + if (!options.name) { throw errCode(new Error('No child name passed to removeLink'), 'EINVALIDCHILDNAME') } - const meta = UnixFS.unmarshal(options.parent.Data) + const meta = UnixFS.unmarshal(parent.Data) if (meta.type === 'hamt-sharded-directory') { log(`Removing ${options.name} from sharded directory`) - return removeFromShardedDirectory(context, options) + return removeFromShardedDirectory(context, { + ...options, + parent + }) } log(`Removing link ${options.name} regular directory`) - return removeFromDirectory(context, options) + return removeFromDirectory(context, { + ...options, + parent + }) } +/** + * @param {MfsContext} context + * @param {RemoveLinkOptionsInternal} options + */ const removeFromDirectory = async (context, options) => { const hashAlg = mh.names[options.hashAlg] @@ -64,6 +103,10 @@ const removeFromDirectory = async (context, options) => { } } +/** + * @param {MfsContext} context + * @param {RemoveLinkOptionsInternal} options + */ const removeFromShardedDirectory = async (context, options) => { const { rootBucket, path @@ -73,65 +116,86 @@ const removeFromShardedDirectory = async (context, options) => { const { node - } = await updateShard(context, path, { - name: options.name, - cid: options.cid, - size: options.size, - hashAlg: options.hashAlg, - cidVersion: options.cidVersion, - flush: options.flush - }, options) + } = await updateShard(context, path, options.name, options) return updateHamtDirectory(context, node.Links, rootBucket, options) } -const updateShard = async (context, positions, child, options) => { +/** + * @param {MfsContext} context + * @param {{ bucket: 
Bucket, prefix: string, node?: DAGNode }[]} positions + * @param {string} name + * @param {RemoveLinkOptionsInternal} options + * @returns {Promise<{ node: DAGNode, cid: CID, size: number }>} + */ +const updateShard = async (context, positions, name, options) => { + const last = positions.pop() + + if (!last) { + throw errCode(new Error('Could not find parent'), 'EINVALIDPARENT') + } + const { bucket, prefix, node - } = positions.pop() + } = last + + if (!node) { + throw errCode(new Error('Could not find parent'), 'EINVALIDPARENT') + } const link = node.Links .find(link => link.Name.substring(0, 2) === prefix) if (!link) { - throw errCode(new Error(`No link found with prefix ${prefix} for file ${child.name}`), 'ERR_NOT_FOUND') + throw errCode(new Error(`No link found with prefix ${prefix} for file ${name}`), 'ERR_NOT_FOUND') } - if (link.Name === `${prefix}${child.name}`) { + if (link.Name === `${prefix}${name}`) { log(`Removing existing link ${link.Name}`) node.rmLink(link.Name) - await bucket.del(child.name) + await bucket.del(name) return updateHamtDirectory(context, node.Links, bucket, options) } - log(`Descending into sub-shard ${link.Name} for ${prefix}${child.name}`) + log(`Descending into sub-shard ${link.Name} for ${prefix}${name}`) - const result = await updateShard(context, positions, child, options) + const result = await updateShard(context, positions, name, options) + let cid = result.cid + let size = result.size let newName = prefix if (result.node.Links.length === 1) { log(`Removing subshard for ${prefix}`) // convert shard back to normal dir - result.cid = result.node.Links[0].Hash - result.node = result.node.Links[0] + const link = result.node.Links[0] - newName = `${prefix}${result.node.Name.substring(2)}` + newName = `${prefix}${link.Name.substring(2)}` + cid = link.Hash + size = link.Tsize } log(`Updating shard ${prefix} with name ${newName}`) - const size = DAGNode.isDAGNode(result.node) ? 
result.node.size : result.node.Tsize - - return updateShardParent(context, bucket, node, prefix, newName, size, result.cid, options) + return updateShardParent(context, bucket, node, prefix, newName, size, cid, options) } +/** + * @param {MfsContext} context + * @param {Bucket} bucket + * @param {DAGNode} parent + * @param {string} oldName + * @param {string} newName + * @param {number} size + * @param {CID} cid + * @param {RemoveLinkOptionsInternal} options + */ const updateShardParent = (context, bucket, parent, oldName, newName, size, cid, options) => { parent.rmLink(oldName) parent.addLink(new DAGLink(newName, size, cid)) diff --git a/packages/ipfs-core/src/components/files/utils/to-async-iterator.js b/packages/ipfs-core/src/components/files/utils/to-async-iterator.js index e591bcd2c0..93acfbcf26 100644 --- a/packages/ipfs-core/src/components/files/utils/to-async-iterator.js +++ b/packages/ipfs-core/src/components/files/utils/to-async-iterator.js @@ -7,6 +7,9 @@ const { } = require('../../../utils') const uint8ArrayFromString = require('uint8arrays/from-string') +/** + * @param {*} content + */ const toAsyncIterator = (content) => { if (!content) { throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') @@ -58,7 +61,11 @@ const toAsyncIterator = (content) => { const reader = new global.FileReader() + /** + * @param {{ error?: Error }} ev + */ const handleLoad = (ev) => { + // @ts-ignore No overload matches this call. reader.removeEventListener('loadend', handleLoad, false) if (ev.error) { @@ -71,6 +78,7 @@ const toAsyncIterator = (content) => { }) } + // @ts-ignore No overload matches this call. 
reader.addEventListener('loadend', handleLoad) reader.readAsArrayBuffer(chunk) }) diff --git a/packages/ipfs-core/src/components/files/utils/to-mfs-path.js b/packages/ipfs-core/src/components/files/utils/to-mfs-path.js index c5a23828af..247b5bb0bb 100644 --- a/packages/ipfs-core/src/components/files/utils/to-mfs-path.js +++ b/packages/ipfs-core/src/components/files/utils/to-mfs-path.js @@ -2,69 +2,153 @@ const loadMfsRoot = require('./with-mfs-root') const toPathComponents = require('./to-path-components') -const exporter = require('ipfs-unixfs-exporter') +const { exporter } = require('ipfs-unixfs-exporter') const errCode = require('err-code') const CID = require('cids') const IPFS_PREFIX = 'ipfs' +/** + * @typedef {import('ipfs-unixfs-exporter').UnixFSEntry} UnixFSEntry + * @typedef {import('ipfs-unixfs-exporter').ExporterOptions} ExporterOptions + * @typedef {import('../').MfsContext} MfsContext + * + * @typedef {object} FilePath + * @property {'mfs' | 'ipfs'} type + * @property {'file'} entryType + * @property {number} depth + * @property {string} mfsPath + * @property {string} mfsDirectory + * @property {string[]} parts + * @property {string} path + * @property {string} name + * @property {CID} cid + * @property {boolean} exists + * @property {import('ipfs-unixfs').UnixFS} unixfs + * @property {(options?: ExporterOptions) => AsyncIterable} content + * + * @typedef {object} DirectoryPath + * @property {'mfs' | 'ipfs'} type + * @property {'directory'} entryType + * @property {number} depth + * @property {string} mfsPath + * @property {string} mfsDirectory + * @property {string[]} parts + * @property {string} path + * @property {string} name + * @property {CID} cid + * @property {boolean} exists + * @property {import('ipfs-unixfs').UnixFS} unixfs + * @property {(options?: ExporterOptions) => AsyncIterable} content + * + * @typedef {object} ObjectPath + * @property {'mfs' | 'ipfs'} type + * @property {'object'} entryType + * @property {number} depth + * @property 
{string} mfsPath + * @property {string} mfsDirectory + * @property {string[]} parts + * @property {string} path + * @property {string} name + * @property {CID} cid + * @property {boolean} exists + * @property {(options?: ExporterOptions) => AsyncIterable} content + * + * @typedef {object} RawPath + * @property {'mfs' | 'ipfs'} type + * @property {'raw'} entryType + * @property {number} depth + * @property {string} mfsPath + * @property {string} mfsDirectory + * @property {string[]} parts + * @property {string} path + * @property {string} name + * @property {CID} cid + * @property {boolean} exists + * @property {(options?: ExporterOptions) => AsyncIterable} content + * + * @typedef {object} IdentityPath + * @property {'mfs' | 'ipfs'} type + * @property {'identity'} entryType + * @property {number} depth + * @property {string} mfsPath + * @property {string} mfsDirectory + * @property {string[]} parts + * @property {string} path + * @property {string} name + * @property {CID} cid + * @property {boolean} exists + * @property {(options?: ExporterOptions) => AsyncIterable} content + * + * @typedef {FilePath | DirectoryPath | ObjectPath | RawPath | IdentityPath} MfsPath + */ + +/** + * @param {MfsContext} context + * @param {string | CID} path + * @param {import('ipfs-core-types/src/basic').AbortOptions} [options] + */ const toMfsPath = async (context, path, options) => { - const outputArray = Array.isArray(path) - let paths = Array.isArray(path) ? 
path : [path] const root = await loadMfsRoot(context, options) - paths = paths.map(path => { - if (CID.isCID(path)) { - path = `/ipfs/${path}` - } - - path = (path || '').trim() - path = path.replace(/(\/\/+)/g, '/') + /** @type {MfsPath} */ + // @ts-ignore fields get set later + let output = { + entryType: 'file' + } - if (path.endsWith('/') && path.length > 1) { - path = path.substring(0, path.length - 1) - } + if (CID.isCID(path)) { + path = `/ipfs/${path}` + } - if (!path) { - throw errCode(new Error('paths must not be empty'), 'ERR_NO_PATH') - } + path = (path || '').trim() + path = path.replace(/(\/\/+)/g, '/') - if (path.substring(0, 1) !== '/') { - throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') - } + if (path.endsWith('/') && path.length > 1) { + path = path.substring(0, path.length - 1) + } - if (path.substring(path.length - 1) === '/') { - path = path.substring(0, path.length - 1) - } + if (!path) { + throw errCode(new Error('paths must not be empty'), 'ERR_NO_PATH') + } - const pathComponents = toPathComponents(path) + if (path.substring(0, 1) !== '/') { + throw errCode(new Error('paths must start with a leading slash'), 'ERR_INVALID_PATH') + } - if (pathComponents[0] === IPFS_PREFIX) { - // e.g. /ipfs/QMfoo or /ipfs/Qmfoo/sub/path - let mfsDirectory + if (path.substring(path.length - 1) === '/') { + path = path.substring(0, path.length - 1) + } - if (pathComponents.length === 2) { - mfsDirectory = `/${pathComponents.join('/')}` - } else { - mfsDirectory = `/${pathComponents.slice(0, pathComponents.length - 1).join('/')}` - } + const pathComponents = toPathComponents(path) - return { - type: 'ipfs', - depth: pathComponents.length - 2, + if (pathComponents[0] === IPFS_PREFIX) { + // e.g. 
/ipfs/QMfoo or /ipfs/Qmfoo/sub/path + let mfsDirectory - mfsPath: `/${pathComponents.join('/')}`, - mfsDirectory, - parts: pathComponents, - path: `/${pathComponents.join('/')}`, - name: pathComponents[pathComponents.length - 1] - } + if (pathComponents.length === 2) { + mfsDirectory = `/${pathComponents.join('/')}` + } else { + mfsDirectory = `/${pathComponents.slice(0, pathComponents.length - 1).join('/')}` } + // @ts-ignore fields being set + output = { + type: 'ipfs', + depth: pathComponents.length - 2, + + mfsPath: `/${pathComponents.join('/')}`, + mfsDirectory, + parts: pathComponents, + path: `/${pathComponents.join('/')}`, + name: pathComponents[pathComponents.length - 1] + } + } else { const mfsPath = `/${IPFS_PREFIX}/${root}${pathComponents.length ? '/' + pathComponents.join('/') : ''}` const mfsDirectory = `/${IPFS_PREFIX}/${root}/${pathComponents.slice(0, pathComponents.length - 1).join('/')}` - return { + // @ts-ignore fields being set + output = { type: 'mfs', depth: pathComponents.length, @@ -74,34 +158,30 @@ const toMfsPath = async (context, path, options) => { path: `/${pathComponents.join('/')}`, name: pathComponents[pathComponents.length - 1] } - }) - - await Promise.all( - paths.map(async (path) => { - const cidPath = path.type === 'mfs' ? path.mfsPath : path.path - - try { - const res = await exporter(cidPath, context.ipld) - - path.cid = res.cid - path.mfsPath = `/ipfs/${res.path}` - path.unixfs = res.unixfs - path.content = res.content - } catch (err) { - if (err.code !== 'ERR_NOT_FOUND') { - throw err - } - } - - path.exists = Boolean(path.cid) - }) - ) - - if (outputArray) { - return paths } - return paths[0] + const cidPath = output.type === 'mfs' ? 
output.mfsPath : output.path + + try { + const res = await exporter(cidPath, context.ipld) + + output.cid = res.cid + output.mfsPath = `/ipfs/${res.path}` + output.entryType = res.type + output.content = res.content + + if ((output.entryType === 'file' || output.entryType === 'directory') && (res.type === 'file' || res.type === 'directory')) { + output.unixfs = res.unixfs + } + } catch (err) { + if (err.code !== 'ERR_NOT_FOUND') { + throw err + } + } + + output.exists = Boolean(output.cid) + + return output } module.exports = toMfsPath diff --git a/packages/ipfs-core/src/components/files/utils/to-path-components.js b/packages/ipfs-core/src/components/files/utils/to-path-components.js index 5e8262727a..bc80b125d9 100644 --- a/packages/ipfs-core/src/components/files/utils/to-path-components.js +++ b/packages/ipfs-core/src/components/files/utils/to-path-components.js @@ -1,5 +1,8 @@ 'use strict' +/** + * @param {string} [path] + */ const toPathComponents = (path = '') => { // split on / unless escaped with \ return (path diff --git a/packages/ipfs-core/src/components/files/utils/to-sources-and-destination.js b/packages/ipfs-core/src/components/files/utils/to-sources-and-destination.js index 26e8d6a536..0b76a7ae2c 100644 --- a/packages/ipfs-core/src/components/files/utils/to-sources-and-destination.js +++ b/packages/ipfs-core/src/components/files/utils/to-sources-and-destination.js @@ -2,6 +2,15 @@ const toSources = require('./to-sources') +/** + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @param {MfsContext} context + * @param {*} args + * @param {*} defaultOptions + */ async function toSourcesAndDestination (context, args, defaultOptions) { const { sources, diff --git a/packages/ipfs-core/src/components/files/utils/to-sources.js b/packages/ipfs-core/src/components/files/utils/to-sources.js index 256e0440f8..b646138f2c 100644 --- a/packages/ipfs-core/src/components/files/utils/to-sources.js +++ 
b/packages/ipfs-core/src/components/files/utils/to-sources.js @@ -3,9 +3,22 @@ const toMfsPath = require('./to-mfs-path') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) +/** + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @template O + * + * @param {MfsContext} context + * @param {*} args + * @param {O} defaultOptions + */ async function toSources (context, args, defaultOptions) { + /** @type {string[]} */ const sources = [] - let options + /** @type {O} */ + let options = defaultOptions // takes string arguments and a final optional non-string argument for (let i = 0; i < args.length; i++) { @@ -19,7 +32,7 @@ async function toSources (context, args, defaultOptions) { options = mergeOptions(defaultOptions, options) return { - sources: await toMfsPath(context, sources, options), + sources: await Promise.all(sources.map(source => toMfsPath(context, source, options))), options } } diff --git a/packages/ipfs-core/src/components/files/utils/to-trail.js b/packages/ipfs-core/src/components/files/utils/to-trail.js index 7a42d8acca..387c50a920 100644 --- a/packages/ipfs-core/src/components/files/utils/to-trail.js +++ b/packages/ipfs-core/src/components/files/utils/to-trail.js @@ -1,19 +1,44 @@ 'use strict' -const exporter = require('ipfs-unixfs-exporter') +const { walkPath } = require('ipfs-unixfs-exporter') const log = require('debug')('ipfs:mfs:utils:to-trail') +/** + * @typedef {import('../').MfsContext} MfsContext + * @typedef {object} MfsTrail + * @property {string} name + * @property {import('cids')} cid + * @property {number} [size] + * @property {string} [type] + * + * TODO: export supported types from unixfs-exporter and use for `type` above + */ + +/** + * @param {MfsContext} context + * @param {string} path + * @returns {Promise} + */ const toTrail = async (context, path) => { log(`Creating trail for path ${path}`) const output = [] - for await (const fsEntry of exporter.path(path, context.ipld)) { + for await 
(const fsEntry of walkPath(path, context.ipld)) { + let size + + // TODO: include `.size` property in unixfs-exporter output + if (fsEntry.node instanceof Uint8Array) { + size = fsEntry.node.length + } else { + size = fsEntry.node.size + } + output.push({ name: fsEntry.name, cid: fsEntry.cid, - size: fsEntry.node.size, - type: fsEntry.unixfs ? fsEntry.unixfs.type : undefined + size, + type: fsEntry.type }) } diff --git a/packages/ipfs-core/src/components/files/utils/types.ts b/packages/ipfs-core/src/components/files/utils/types.ts deleted file mode 100644 index 9d22f87a08..0000000000 --- a/packages/ipfs-core/src/components/files/utils/types.ts +++ /dev/null @@ -1,12 +0,0 @@ -/** - * Helper type to represet monomorphic tuples with up to 8 items. - */ -export type Tuple = - | [T] - | [T, T] - | [T, T, T] - | [T, T, T, T] - | [T, T, T, T, T] - | [T, T, T, T, T, T] - | [T, T, T, T, T, T, T] - | [T, T, T, T, T, T, T, T] diff --git a/packages/ipfs-core/src/components/files/utils/update-mfs-root.js b/packages/ipfs-core/src/components/files/utils/update-mfs-root.js index d172c34c76..b16ed73c80 100644 --- a/packages/ipfs-core/src/components/files/utils/update-mfs-root.js +++ b/packages/ipfs-core/src/components/files/utils/update-mfs-root.js @@ -6,6 +6,15 @@ const { } = require('../../../utils') const errCode = require('err-code') +/** + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @param {MfsContext} context + * @param {import('cids')} cid + * @param {import('ipfs-core-types/src/basic').AbortOptions} options + */ const updateMfsRoot = async (context, cid, options) => { if (options && options.signal && options.signal.aborted) { throw errCode(new Error('Request aborted'), 'ERR_ABORTED', { name: 'Aborted' }) diff --git a/packages/ipfs-core/src/components/files/utils/update-tree.js b/packages/ipfs-core/src/components/files/utils/update-tree.js index dc78800482..226f9c79a8 100644 --- a/packages/ipfs-core/src/components/files/utils/update-tree.js +++ 
b/packages/ipfs-core/src/components/files/utils/update-tree.js @@ -7,7 +7,25 @@ const defaultOptions = { shardSplitThreshold: 1000 } -// loop backwards through the trail, replacing links of all components to update CIDs +/** + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids')} CID + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('../').MfsContext} MfsContext + * @typedef {import('./to-trail').MfsTrail} MfsTrail + */ + +/** + * Loop backwards through the trail, replacing links of all components to update CIDs + * + * @param {MfsContext} context + * @param {MfsTrail[]} trail + * @param {object} options + * @param {number} options.shardSplitThreshold + * @param {HashName} options.hashAlg + * @param {CIDVersion} options.cidVersion + * @param {boolean} options.flush + */ const updateTree = async (context, trail, options) => { options = Object.assign({}, defaultOptions, options) @@ -32,6 +50,7 @@ const updateTree = async (context, trail, options) => { continue } + /** @type {{ cid: CID, size: number }} */ const result = await addLink(context, { parent: node, name: child.name, diff --git a/packages/ipfs-core/src/components/files/utils/with-mfs-root.js b/packages/ipfs-core/src/components/files/utils/with-mfs-root.js index a25b0ceb73..0623ec8331 100644 --- a/packages/ipfs-core/src/components/files/utils/with-mfs-root.js +++ b/packages/ipfs-core/src/components/files/utils/with-mfs-root.js @@ -1,7 +1,7 @@ 'use strict' const CID = require('cids') -const UnixFs = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const { DAGNode } = require('ipld-dag-pb') @@ -14,6 +14,14 @@ const { MFS_ROOT_KEY } = require('../../../utils') +/** + * @typedef {import('../').MfsContext} MfsContext + */ + +/** + * @param {MfsContext} context + * @param {import('ipfs-core-types/src/basic').AbortOptions} [options] + */ const loadMfsRoot = async (context, options) => { if (options && options.signal && options.signal.aborted) { 
throw errCode(new Error('Request aborted'), 'ERR_ABORTED', { name: 'Aborted' }) @@ -35,7 +43,7 @@ const loadMfsRoot = async (context, options) => { } log('Creating new MFS root') - const node = new DAGNode(new UnixFs({ type: 'directory' }).marshal()) + const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal()) cid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: 0, hashAlg: mh.names['sha2-256'] // why can't ipld look this up? diff --git a/packages/ipfs-core/src/components/files/write.js b/packages/ipfs-core/src/components/files/write.js index 330043ca8a..c590bdb6b2 100644 --- a/packages/ipfs-core/src/components/files/write.js +++ b/packages/ipfs-core/src/components/files/write.js @@ -1,7 +1,7 @@ 'use strict' const log = require('debug')('ipfs:mfs:write') -const importer = require('ipfs-unixfs-importer') +const { importer } = require('ipfs-unixfs-importer') const stat = require('./stat') const mkdir = require('./mkdir') const addLink = require('./utils/add-link') @@ -20,9 +20,39 @@ const { const last = require('it-last') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('multihashes').HashName} HashName + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('ipfs-core-types/src/basic').ToMTime} Mtime + * @typedef {import('./').MfsContext} MfsContext + * @typedef {import('./utils/to-mfs-path').FilePath} FilePath + * @typedef {object} DefaultOptions + * @property {number} offset + * @property {number} length + * @property {boolean} create + * @property {boolean} truncate + * @property {boolean} rawLeaves + * @property {boolean} reduceSingleLeafToSelf + * @property {CIDVersion} cidVersion + * @property {HashName} hashAlg + * @property {boolean} parents + * @property {import('ipfs-core-types/src/root').AddProgressFn} progress + * @property {'trickle' | 'balanced'} strategy + * @property {boolean} flush + * @property {'raw' | 'file'} leafType + * @property {number} 
shardSplitThreshold + * @property {Mtime} [mtime] + * @property {number} [mode] + * @property {AbortSignal} [signal] + * @property {number} [timeout] + */ + +/** + * @type {DefaultOptions} + */ const defaultOptions = { offset: 0, // the offset in the file to begin writing - length: undefined, // how many bytes from the incoming buffer to write + length: Infinity, // how many bytes from the incoming buffer to write create: false, // whether to create the file if it does not exist truncate: false, // whether to truncate the file first rawLeaves: false, @@ -30,27 +60,23 @@ const defaultOptions = { cidVersion: 0, hashAlg: 'sha2-256', parents: false, // whether to create intermediate directories if they do not exist - progress: () => {}, + progress: (bytes, path) => {}, strategy: 'trickle', flush: true, leafType: 'raw', - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined, - signal: undefined + shardSplitThreshold: 1000 } +/** + * @param {MfsContext} context + */ module.exports = (context) => { /** - * Write to an MFS path - * - * @param {string} path - The MFS path where you will write to - * @param {string|Uint8Array|AsyncIterable|Blob} content - The content to write to the path - * @param {WriteOptions & AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/files').API["write"]} */ - async function mfsWrite (path, content, options = {}) { - options = mergeOptions(defaultOptions, options) + async function mfsWrite (path, content, opts = {}) { + /** @type {DefaultOptions} */ + const options = mergeOptions(defaultOptions, opts) let source, destination, parent log('Reading source, destination and parent') @@ -60,22 +86,42 @@ module.exports = (context) => { parent = await toMfsPath(context, destination.mfsDirectory, options) })() log('Read source, destination and parent') - // @ts-ignore - parent maybe undefined + // @ts-ignore - parent may be undefined if (!options.parents && !parent.exists) { throw errCode(new 
Error('directory does not exist'), 'ERR_NO_EXIST') } - // @ts-ignore - parent maybe undefined + if (!source) { + throw errCode(new Error('could not create source'), 'ERR_NO_SOURCE') + } + + if (!destination) { + throw errCode(new Error('could not create destination'), 'ERR_NO_DESTINATION') + } + + // @ts-ignore - destination may be never if (!options.create && !destination.exists) { throw errCode(new Error('file does not exist'), 'ERR_NO_EXIST') } + // @ts-ignore - destination may be never + if (destination.entryType !== 'file') { + throw errCode(new Error('not a file'), 'ERR_NOT_A_FILE') + } + return updateOrImport(context, path, source, destination, options) } return withTimeoutOption(mfsWrite) } +/** + * @param {MfsContext} context + * @param {string} path + * @param {AsyncIterable} source + * @param {FilePath} destination + * @param {DefaultOptions} options + */ const updateOrImport = async (context, path, source, destination, options) => { const child = await write(context, source, destination, options) @@ -84,6 +130,11 @@ const updateOrImport = async (context, path, source, destination, options) => { await createLock().writeLock(async () => { const pathComponents = toPathComponents(path) const fileName = pathComponents.pop() + + if (fileName == null) { + throw errCode(new Error('source does not exist'), 'ERR_NO_EXIST') + } + let parentExists = false try { @@ -104,7 +155,11 @@ const updateOrImport = async (context, path, source, destination, options) => { const trail = await toTrail(context, updatedPath.mfsDirectory) const parent = trail[trail.length - 1] - if (!parent.type.includes('directory')) { + if (!parent) { + throw errCode(new Error('directory does not exist'), 'ERR_NO_EXIST') + } + + if (!parent.type || !parent.type.includes('directory')) { throw errCode(new Error(`cannot write to ${parent.name}: Not a directory`), 'ERR_NOT_A_DIRECTORY') } @@ -131,6 +186,12 @@ const updateOrImport = async (context, path, source, destination, options) => { })() } +/** 
+ * @param {MfsContext} context + * @param {AsyncIterable} source + * @param {FilePath} destination + * @param {DefaultOptions} options + */ const write = async (context, source, destination, options) => { if (destination.exists) { log(`Overwriting file ${destination.cid} offset ${options.offset} length ${options.length}`) @@ -138,6 +199,7 @@ const write = async (context, source, destination, options) => { log(`Writing file offset ${options.offset} length ${options.length}`) } + /** @type {Array<() => AsyncIterable>} */ const sources = [] // pad start of file if necessary @@ -206,7 +268,7 @@ const write = async (context, source, destination, options) => { let mtime - if (options.mtime !== undefined && options.mtine !== null) { + if (options.mtime != null) { mtime = options.mtime } else if (destination && destination.unixfs) { mtime = destination.unixfs.mtime @@ -229,6 +291,10 @@ const write = async (context, source, destination, options) => { pin: false })) + if (!result) { + throw errCode(new Error(`cannot write to ${parent.name}`), 'ERR_COULD_NOT_WRITE') + } + log(`Wrote ${result.cid}`) return { @@ -237,6 +303,10 @@ const write = async (context, source, destination, options) => { } } +/** + * @param {AsyncIterable} stream + * @param {number} limit + */ const limitAsyncStreamBytes = (stream, limit) => { return async function * _limitAsyncStreamBytes () { let emitted = 0 @@ -255,26 +325,35 @@ const limitAsyncStreamBytes = (stream, limit) => { } } +/** + * @param {number} count + * @param {number} chunkSize + */ const asyncZeroes = (count, chunkSize = MFS_MAX_CHUNK_SIZE) => { const buf = new Uint8Array(chunkSize) - const stream = { - [Symbol.asyncIterator]: function * _asyncZeroes () { - while (true) { - yield buf.slice() - } + async function * _asyncZeroes () { + while (true) { + yield buf.slice() } } - return limitAsyncStreamBytes(stream, count) + return limitAsyncStreamBytes(_asyncZeroes(), count) } +/** + * @param {Array<() => AsyncIterable>} sources + */ const 
catAsyncIterators = async function * (sources) { // eslint-disable-line require-await for (let i = 0; i < sources.length; i++) { yield * sources[i]() } } +/** + * @param {AsyncIterable} source + * @param {(count: number) => AsyncIterable} notify + */ const countBytesStreamed = async function * (source, notify) { let wrote = 0 @@ -290,20 +369,3 @@ const countBytesStreamed = async function * (source, notify) { yield buf } } - -/** - * @typedef {Object} WriteOptions - * @property {number} [offset] - An offset to start writing to file at - * @property {number} [length] - Optionally limit how many bytes are read from the stream - * @property {boolean} [create=false] - Create the MFS path if it does not exist - * @property {boolean} [parents=false] - Create intermediate MFS paths if they do not exist - * @property {boolean} [truncate=false] - Truncate the file at the MFS path if it would have been larger than the passed content - * @property {boolean} [rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf - * @property {import('ipfs-core-types/src/files').ToMode} [mode] - An integer that represents the file mode - * @property {import('ipfs-core-types/src/files').ToMTime} [mtime] - A Date object, an object with `{ secs, nsecs }` properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of `process.hrtime() - * @property {boolean} [flush] - If true the changes will be immediately flushed to disk - * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries - * @property {0|1} [cidVersion=0] - The CID version to use for any updated entries - * - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/gc-lock.js b/packages/ipfs-core/src/components/gc-lock.js index 9eed02de1c..16a7053c2c 100644 --- 
a/packages/ipfs-core/src/components/gc-lock.js +++ b/packages/ipfs-core/src/components/gc-lock.js @@ -1,5 +1,6 @@ 'use strict' +// @ts-ignore - no types const mortice = require('mortice') /** diff --git a/packages/ipfs-core/src/components/get.js b/packages/ipfs-core/src/components/get.js index f183dc37a2..f7a30afe60 100644 --- a/packages/ipfs-core/src/components/get.js +++ b/packages/ipfs-core/src/components/get.js @@ -7,19 +7,14 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @typedef {Object} Context - * @property {import('.').IPLD} ipld - * @property {import('.').Preload} preload + * @property {import('ipld')} ipld + * @property {import('../types').Preload} preload * * @param {Context} context */ module.exports = function ({ ipld, preload }) { /** - * Fetch a file or an entire directory tree from IPFS that is addressed by a - * valid IPFS Path. - * - * @param {import('ipfs-core-types/src/root').IPFSPath} ipfsPath - * @param {import('ipfs-core-types/src/root').GetOptions} [options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/root').API["get"]} */ async function * get (ipfsPath, options = {}) { if (options.preload !== false) { diff --git a/packages/ipfs-core/src/components/id.js b/packages/ipfs-core/src/components/id.js index cbd06eb187..d54730cf72 100644 --- a/packages/ipfs-core/src/components/id.js +++ b/packages/ipfs-core/src/components/id.js @@ -7,24 +7,18 @@ const uint8ArrayToString = require('uint8arrays/to-string') /** * @param {Object} config - * @param {import('.').PeerId} config.peerId - * @param {import('.').NetworkService} config.network + * @param {import('peer-id')} config.peerId + * @param {import('../types').NetworkService} config.network */ module.exports = ({ peerId, network }) => { /** - * Returns the identity of the Peer - * - * @param {import('../utils').AbortOptions} [_options] - * @returns {Promise} - * @example - * ```js - * const identity = await ipfs.id() - * 
console.log(identity) - * ``` + * @type {import('ipfs-core-types/src/root').API["id"]} */ async function id (_options) { // eslint-disable-line require-await const id = peerId.toB58String() + /** @type {import('multiaddr')[]} */ let addresses = [] + /** @type {string[]} */ let protocols = [] const net = network.try() diff --git a/packages/ipfs-core/src/components/index.js b/packages/ipfs-core/src/components/index.js index 20652c29a5..555ec3dac2 100644 --- a/packages/ipfs-core/src/components/index.js +++ b/packages/ipfs-core/src/components/index.js @@ -5,8 +5,7 @@ const { isTest } = require('ipfs-utils/src/env') const log = require('debug')('ipfs') const { DAGNode } = require('ipld-dag-pb') -const UnixFs = require('ipfs-unixfs') -const multicodec = require('multicodec') +const { UnixFS } = require('ipfs-unixfs') const initAssets = require('../runtime/init-assets-nodejs') const { AlreadyInitializedError } = require('../errors') @@ -36,7 +35,7 @@ const KeyAPI = require('./key') const ObjectAPI = require('./object') const RepoAPI = require('./repo') const StatsAPI = require('./stats') -const IPFSBlockService = require('ipfs-block-service') +const BlockService = require('ipfs-block-service') const createIPLD = require('./ipld') const Storage = require('./storage') const Network = require('./network') @@ -47,6 +46,12 @@ const createPingAPI = require('./ping') const createDHTAPI = require('./dht') const createPubSubAPI = require('./pubsub') +/** + * @typedef {import('../types').Options} Options + * @typedef {import('../types').Print} Print + * @typedef {import('./storage')} StorageAPI + */ + class IPFS { /** * @param {Object} config @@ -60,9 +65,8 @@ class IPFS { const preload = createPreloadAPI(options.preload) - /** @type {BlockService} */ - const blockService = new IPFSBlockService(storage.repo) - const ipld = createIPLD({ blockService, print, options: options.ipld }) + const blockService = new BlockService(storage.repo) + const ipld = createIPLD({ blockService, 
options: options.ipld }) const gcLock = createGCLockAPI({ path: repo.path, @@ -73,22 +77,21 @@ class IPFS { // @ts-ignore This type check fails as options. // libp2p can be a function, while IPNS router config expects libp2p config const ipns = new IPNSAPI(options) - const dagReader = DagAPI.reader({ ipld, preload }) const name = new NameAPI({ dns, ipns, - dagReader, + ipld, peerId, isOnline, keychain, options }) const resolve = createResolveAPI({ ipld, name }) - const pinManager = new PinManagerAPI({ repo, dagReader }) - const pin = new PinAPI({ gcLock, pinManager, dagReader }) + const pinManager = new PinManagerAPI({ repo, ipld }) + const pin = new PinAPI({ gcLock, pinManager, ipld }) const block = new BlockAPI({ blockService, preload, gcLock, pinManager, pin }) - const dag = new DagAPI({ ipld, preload, gcLock, pin, dagReader }) + const dag = new DagAPI({ ipld, preload, gcLock, pin }) const refs = Object.assign(createRefsAPI({ ipld, resolve, preload }), { local: createRefsLocalAPI({ repo: storage.repo }) }) @@ -167,7 +170,7 @@ class IPFS { this.dag = dag this.files = files this.key = new KeyAPI({ keychain }) - this.object = new ObjectAPI({ ipld, preload, gcLock, dag }) + this.object = new ObjectAPI({ ipld, preload, gcLock }) this.repo = new RepoAPI({ gcLock, pin, repo, refs }) this.stats = new StatsAPI({ repo, network }) this.swarm = new SwarmAPI({ network }) @@ -196,16 +199,11 @@ class IPFS { */ static async create (options = {}) { options = mergeOptions(getDefaultOptions(), options) + const initOptions = options.init || {} // eslint-disable-next-line no-console const print = options.silent ? 
log : console.log - - const init = { - ...mergeOptions(initOptions(options), options), - print - } - - const storage = await Storage.start(init) + const storage = await Storage.start(print, options) const config = await storage.repo.config.getAll() const ipfs = new IPFS({ @@ -217,7 +215,8 @@ class IPFS { await ipfs.preload.start() ipfs.ipns.startOffline(storage) - if (storage.isNew && !init.emptyRepo) { + + if (storage.isNew && !initOptions.emptyRepo) { // add empty unixfs dir object (go-ipfs assumes this exists) const cid = await addEmptyDir(ipfs) @@ -225,7 +224,7 @@ class IPFS { await initAssets({ addAll: ipfs.addAll, print }) log('initializing IPNS keyspace') - await ipfs.ipns.initializeKeyspace(storage.peerId.privKey, cid.toString()) + await ipfs.ipns.initializeKeyspace(storage.peerId.privKey, cid.bytes) } if (options.start !== false) { @@ -235,24 +234,18 @@ class IPFS { return ipfs } } -module.exports = IPFS -/** - * @param {Options} options - * @returns {InitOptions} - */ -const initOptions = ({ init }) => - typeof init === 'object' ? init : {} +module.exports = IPFS /** * @param {IPFS} ipfs */ const addEmptyDir = async (ipfs) => { - const node = new DAGNode(new UnixFs('directory').marshal()) + const node = new DAGNode(new UnixFS({ type: 'directory' }).marshal()) const cid = await ipfs.dag.put(node, { version: 0, - format: multicodec.DAG_PB, - hashAlg: multicodec.SHA2_256, + format: 'dag-pb', + hashAlg: 'sha2-256', preload: false }) @@ -277,112 +270,3 @@ const getDefaultOptions = () => ({ ] } }) - -/** - * @typedef {StorageOptions & IPFSOptions} Options - * - * @typedef {Object} IPFSOptions - * Options argument can be used to specify advanced configuration. - * @property {InitOptions} [init] - Initialization options - * the IPFS node. - * Note that *initializing* a repo is different from creating an instance of - * [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo). 
The IPFS constructor - * sets many special properties when initializing a repo, so you should usually - * not try and call `repoInstance.init()` yourself. - * @property {boolean} [start=true] - If `false`, do not automatically - * start the IPFS node. Instead, you’ll need to manually call - * [`node.start()`](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs/docs/MODULE.md#nodestart) - * yourself. - * @property {string} [pass=null] - A passphrase to encrypt/decrypt your keys. - * @property {boolean} [silent=false] - Prevents all logging output from the - * IPFS node. (Default: `false`) - * @property {RelayOptions} [relay={ enabled: true, hop: { enabled: false, active: false } }] - * - Configure circuit relay (see the [circuit relay tutorial] - * (https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) - * to learn more). - * @property {boolean} [offline=false] - Run ipfs node offline. The node does - * not connect to the rest of the network but provides a local API. - * @property {PreloadOptions} [preload] - Configure remote preload nodes. - * The remote will preload content added on this node, and also attempt to - * preload objects requested by this node. - * @property {ExperimentalOptions} [EXPERIMENTAL] - Enable and configure - * experimental features. - * @property {IPFSConfig} [config] - Modify the default IPFS node config. This - * object will be *merged* with the default config; it will not replace it. - * (Default: [`config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-nodejs.js) - * in Node.js, [`config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-browser.js) - * in browsers) - * @property {IPLDOptions} [ipld] - Modify the default IPLD config. This object - * will be *merged* with the default config; it will not replace it. 
Check IPLD - * [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information - * on the available options. (Default: [`ipld.js`] - * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) - * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld.js) - * in browsers) - * @property {LibP2POptions|Function} [libp2p] - The libp2p option allows you to build - * your libp2p node by configuration, or via a bundle function. If you are - * looking to just modify the below options, using the object format is the - * quickest way to get the default features of libp2p. If you need to create a - * more customized libp2p node, such as with custom transports or peer/content - * routers that need some of the ipfs data on startup, a custom bundle is a - * great way to achieve this. - * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). - * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) - * for the list of options libp2p supports. - * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) - * in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in - * browsers. - * @property {boolean} [repoOwner] - * - * @typedef {object} ExperimentalOptions - * @property {boolean} [ipnsPubsub] - Enable pub-sub on IPNS. (Default: `false`) - * @property {boolean} [sharding] - Enable directory sharding. Directories that have many child objects will be represented by multiple DAG nodes instead of just one. It can improve lookup performance when a directory has several thousand files or more. 
(Default: `false`) - * - * - * @typedef {import('./storage').StorageOptions} StorageOptions - * @typedef {import('../preload').Options} PreloadOptions - * @typedef {import('./ipld').Options} IPLDOptions - * @typedef {import('./libp2p').Options} LibP2POptions - * - * @typedef {object} RelayOptions - * @property {boolean} [enabled] - Enable circuit relay dialer and listener. (Default: `true`) - * @property {object} [hop] - * @property {boolean} [hop.enabled] - Make this node a relay (other nodes can connect *through* it). (Default: `false`) - * @property {boolean} [hop.active] - Make this an *active* relay node. Active relay nodes will attempt to dial a destin - * - * @typedef {import('./storage').InitOptions} InitOptions - * - * @typedef {import('./storage')} StorageAPI - * - * @typedef {import('./network').Options} NetworkOptions - * @typedef {Service} NetworkService - * @typedef {import('./storage').Repo} Repo - * @typedef {(...args:any[]) => void} Print - * @typedef {import('./storage').Keychain} Keychain - * @typedef {import('./config').IPFSConfig} IPFSConfig - * - * @typedef {import('peer-id')} PeerId - * @typedef {import('./libp2p').LibP2P} LibP2P - * @typedef {import('./pin/pin-manager')} PinManager - * @typedef {import('ipfs-core-types/src/block-service').BlockService} BlockService - * @typedef {import('ipfs-core-types/src/bitswap').Bitswap} BitSwap - * @typedef {import('./ipld').IPLD} IPLD - * @typedef {import('./gc-lock').GCLock} GCLock - * @typedef {import('../preload').Preload} Preload - * @typedef {import('../mfs-preload').MFSPreload} MFSPreload - * @typedef {import('./ipns')} IPNS - * @typedef {import('./pin')} Pin - * @typedef {import('./block')} Block - * @typedef {import('./dag').DagReader} DagReader - * @typedef {import('./dag')} Dag - * @typedef {ReturnType} Files - * @typedef {ReturnType} IsOnline - * @typedef {ReturnType} Resolve - * @typedef {ReturnType} Refs - * @typedef {ReturnType} DNS - * @typedef {import('./name')} Name - * @typedef 
{import('../utils').AbortOptions} AbortOptions - * @typedef {import('cids')} CID - * @typedef {import('multiaddr')} Multiaddr - * @typedef {import('./ipld').Block} IPLDBlock - */ diff --git a/packages/ipfs-core/src/components/ipld.js b/packages/ipfs-core/src/components/ipld.js index a4505424d5..998166a50d 100644 --- a/packages/ipfs-core/src/components/ipld.js +++ b/packages/ipfs-core/src/components/ipld.js @@ -5,19 +5,11 @@ const Ipld = require('ipld') /** * @param {Object} config - * @param {BlockService} config.blockService - * @param {Print} config.print - * @param {Options} [config.options] - * @returns {IPLD} + * @param {import('ipfs-block-service')} config.blockService + * @param {import('ipld').Options} [config.options] */ -const createIPLD = ({ blockService, print, options }) => - new Ipld(getDefaultIpldOptions(blockService, options, print)) -module.exports = createIPLD +const createIPLD = ({ blockService, options }) => { + return new Ipld(getDefaultIpldOptions(blockService, options)) +} -/** - * @typedef {import('ipfs-core-types/src/ipld').IPLD} IPLD - * @typedef {import('ipfs-core-types/src/ipld').Options} Options - * @typedef {import('ipfs-core-types/src/block-service').BlockService} BlockService - * @typedef {import('ipfs-core-types/src/block-service').Block} Block - * @typedef {import('.').Print} Print - */ +module.exports = createIPLD diff --git a/packages/ipfs-core/src/components/ipns.js b/packages/ipfs-core/src/components/ipns.js index 4c732f6df4..2f85ed61f8 100644 --- a/packages/ipfs-core/src/components/ipns.js +++ b/packages/ipfs-core/src/components/ipns.js @@ -6,17 +6,34 @@ const OfflineDatastore = require('../ipns/routing/offline-datastore') const { NotInitializedError, AlreadyInitializedError } = require('../errors') const log = require('debug')('ipfs:components:ipns') +/** + * @typedef {import('libp2p-crypto').PrivateKey} PrivateKey + * + * @typedef {Object} ExperimentalOptions + * @property {boolean} [ipnsPubsub] + * + * @typedef {Object} 
LibP2POptions + * @property {DHTConfig} [config] + * + * @typedef {Object} DHTConfig + * @property {boolean} [enabled] + */ + class IPNSAPI { /** * @param {Object} options - * @param {string} [options.pass] + * @param {string} options.pass * @param {boolean} [options.offline] * @param {LibP2POptions} [options.libp2p] * @param {ExperimentalOptions} [options.EXPERIMENTAL] */ - constructor (options = {}) { + constructor (options = { pass: '' }) { this.options = options + + /** @type {IPNS | null} */ this.offline = null + + /** @type {IPNS | null} */ this.online = null } @@ -41,9 +58,9 @@ class IPNSAPI { * initializeKeyspace feature. * * @param {Object} config - * @param {import('.').Repo} config.repo - * @param {import('.').PeerId} config.peerId - * @param {import('.').Keychain} config.keychain + * @param {import('ipfs-repo')} config.repo + * @param {import('peer-id')} config.peerId + * @param {import('libp2p/src/keychain')} config.keychain */ startOffline ({ repo, peerId, keychain }) { if (this.offline != null) { @@ -60,10 +77,10 @@ class IPNSAPI { /** * @param {Object} config - * @param {import('.').LibP2P} config.libp2p - * @param {import('.').Repo} config.repo - * @param {import('.').PeerId} config.peerId - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p')} config.libp2p + * @param {import('ipfs-repo')} config.repo + * @param {import('peer-id')} config.peerId + * @param {import('libp2p/src/keychain')} config.keychain */ async startOnline ({ libp2p, repo, peerId, keychain }) { if (this.online != null) { @@ -71,6 +88,7 @@ class IPNSAPI { } const routing = routingConfig({ libp2p, repo, peerId, options: this.options }) + // @ts-ignore routing is a TieredDatastore which wants keys to be Keys, IPNS needs keys to be Uint8Arrays const ipns = new IPNS(routing, repo.datastore, peerId, keychain, this.options) await ipns.republisher.start() this.online = ipns @@ -84,27 +102,30 @@ class IPNSAPI { } } + /** + * @param {PrivateKey} privKey + * @param 
{Uint8Array} value + * @param {number} lifetime + */ publish (privKey, value, lifetime) { return this.getIPNS().publish(privKey, value, lifetime) } + /** + * + * @param {string} name + * @param {*} [options] + */ resolve (name, options) { return this.getIPNS().resolve(name, options) } + /** + * @param {PrivateKey} privKey + * @param {Uint8Array} value + */ initializeKeyspace (privKey, value) { return this.getIPNS().initializeKeyspace(privKey, value) } } module.exports = IPNSAPI - -/** - * @typedef {Object} ExperimentalOptions - * @property {boolean} [ipnsPubsub] - * - * @typedef {Object} LibP2POptions - * @property {DHTConfig} [config] - * - * @typedef {Object} DHTConfig - * @property {boolean} [enabled] - */ diff --git a/packages/ipfs-core/src/components/is-online.js b/packages/ipfs-core/src/components/is-online.js index 9bb6467ef1..54a3defefd 100644 --- a/packages/ipfs-core/src/components/is-online.js +++ b/packages/ipfs-core/src/components/is-online.js @@ -2,7 +2,7 @@ /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../types').NetworkService} config.network */ module.exports = ({ network }) => /** diff --git a/packages/ipfs-core/src/components/key/export.js b/packages/ipfs-core/src/components/key/export.js index b4ccc2f661..85a387f59b 100644 --- a/packages/ipfs-core/src/components/key/export.js +++ b/packages/ipfs-core/src/components/key/export.js @@ -4,26 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * Remove a key - * - * @example - * ```js - * const pem = await ipfs.key.export('self', 'password') - * - * console.log(pem) - * // -----BEGIN ENCRYPTED PRIVATE KEY----- - * // MIIFDTA/BgkqhkiG9w0BBQ0wMjAaBgkqhkiG9w0BBQwwDQQIpdO40RVyBwACAWQw - * // ... 
- * // YA== - * // -----END ENCRYPTED PRIVATE KEY----- - * ``` - * @param {string} name - The name of the key to export - * @param {string} password - Password to set on the PEM output - * @returns {Promise} - The string representation of the key + * @type {import('ipfs-core-types/src/key').API["export"]} */ const exportKey = (name, password) => keychain.exportKey(name, password) diff --git a/packages/ipfs-core/src/components/key/gen.js b/packages/ipfs-core/src/components/key/gen.js index 491a938657..f1ec77ee17 100644 --- a/packages/ipfs-core/src/components/key/gen.js +++ b/packages/ipfs-core/src/components/key/gen.js @@ -2,42 +2,20 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const DEFAULT_KEY_TYPE = 'rsa' +const DEFAULT_KEY_SIZE = 2048 + /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * Generate a new key - * - * @example - * ```js - * const key = await ipfs.key.gen('my-key', { - * type: 'rsa', - * size: 2048 - * }) - * - * console.log(key) - * // { id: 'QmYWqAFvLWb2G5A69JGXui2JJXzaHXiUEmQkQgor6kNNcJ', - * // name: 'my-key' } - * ``` - * - * @param {string} name - The name to give the key - * @param {GenOptions & AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/key').API["gen"]} */ - const gen = (name, options = {}) => { - return keychain.createKey(name, options.type || 'rsa', options.size || 2048) + const gen = (name, options = { type: DEFAULT_KEY_TYPE, size: DEFAULT_KEY_SIZE }) => { + return keychain.createKey(name, options.type || DEFAULT_KEY_TYPE, options.size || DEFAULT_KEY_SIZE) } return withTimeoutOption(gen) } - -/** - * @typedef {Object} GenOptions - * @property {import('libp2p-crypto').KeyType} [type='RSA'] - The key type - * @property {number} [size=2048] - The key size in bits - * - * @typedef {import('.').Key} Key - * @typedef 
{import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/key/import.js b/packages/ipfs-core/src/components/key/import.js index 35eebe224d..14b4c966f6 100644 --- a/packages/ipfs-core/src/components/key/import.js +++ b/packages/ipfs-core/src/components/key/import.js @@ -4,24 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * Remove a key - * - * @example - * ```js - * const key = await ipfs.key.import('clone', pem, 'password') - * - * console.log(key) - * // { id: 'QmQRiays958UM7norGRQUG3tmrLq8pJdmJarwYSk2eLthQ', - * // name: 'clone' } - * ``` - * @param {string} name - The name of the key to import - * @param {string} pem - The PEM encoded key - * @param {string} password - The password that protects the PEM key - * @returns {Promise} - An object that describes the new key + * @type {import('ipfs-core-types/src/key').API["import"]} */ const importKey = (name, pem, password) => { return keychain.importKey(name, pem, password) diff --git a/packages/ipfs-core/src/components/key/index.js b/packages/ipfs-core/src/components/key/index.js index 4869addc74..aa176cf776 100644 --- a/packages/ipfs-core/src/components/key/index.js +++ b/packages/ipfs-core/src/components/key/index.js @@ -8,6 +8,10 @@ const createList = require('./list') const createRename = require('./rename') const createRm = require('./rm') +/** + * @typedef {import('libp2p/src/keychain')} Keychain + */ + class KeyAPI { /** * @param {Object} config @@ -24,12 +28,3 @@ class KeyAPI { } } module.exports = KeyAPI - -/** - * @typedef {import('..').Keychain} Keychain - * @typedef {import('..').AbortOptions} AbortOptions - * - * @typedef {Object} Key - * @property {string} name - * @property {string} id - */ diff --git a/packages/ipfs-core/src/components/key/info.js 
b/packages/ipfs-core/src/components/key/info.js index d585027a8c..7a2456e3d2 100644 --- a/packages/ipfs-core/src/components/key/info.js +++ b/packages/ipfs-core/src/components/key/info.js @@ -4,19 +4,13 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * @param {string} name - * @returns {Promise} + * @type {import('ipfs-core-types/src/key').API["info"]} */ const info = (name) => keychain.findKeyByName(name) return withTimeoutOption(info) } - -/** - * @typedef {import('.').Key} Key - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/key/list.js b/packages/ipfs-core/src/components/key/list.js index 66e05baca5..0f5c83fda1 100644 --- a/packages/ipfs-core/src/components/key/list.js +++ b/packages/ipfs-core/src/components/key/list.js @@ -4,26 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * List all the keys - * - * @example - * ```js - * const keys = await ipfs.key.list() - * - * console.log(keys) - * // [ - * // { id: 'QmTe4tuceM2sAmuZiFsJ9tmAopA8au71NabBDdpPYDjxAb', - * // name: 'self' }, - * // { id: 'QmWETF5QvzGnP7jKq5sPDiRjSM2fzwzNsna4wSBEzRzK6W', - * // name: 'my-key' } - * // ] - * ``` - * - * @returns {Promise} + * @type {import('ipfs-core-types/src/key').API["list"]} */ const list = () => keychain.listKeys() diff --git a/packages/ipfs-core/src/components/key/rename.js b/packages/ipfs-core/src/components/key/rename.js index 2012a81f87..a02ed0d13c 100644 --- a/packages/ipfs-core/src/components/key/rename.js +++ b/packages/ipfs-core/src/components/key/rename.js @@ -4,25 +4,11 @@ 
const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * Rename a key - * - * @example - * ```js - * const key = await ipfs.key.rename('my-key', 'my-new-key') - * - * console.log(key) - * // { id: 'Qmd4xC46Um6s24MradViGLFtMitvrR4SVexKUgPgFjMNzg', - * // was: 'my-key', - * // now: 'my-new-key', - * // overwrite: false } - * ``` - * @param {string} oldName - The current key name - * @param {string} newName - The desired key name - * @returns {Promise} + * @type {import('ipfs-core-types/src/key').API["rename"]} */ const rename = async (oldName, newName) => { const key = await keychain.renameKey(oldName, newName) @@ -37,13 +23,3 @@ module.exports = ({ keychain }) => { return withTimeoutOption(rename) } - -/** - * @typedef {Object} RenamedKey - * @property {string} was - The name of the key - * @property {string} now - The hash of the key - * @property {string} id - * @property {boolean} overwrite - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/key/rm.js b/packages/ipfs-core/src/components/key/rm.js index a27dd8e091..134354b9c0 100644 --- a/packages/ipfs-core/src/components/key/rm.js +++ b/packages/ipfs-core/src/components/key/rm.js @@ -4,31 +4,13 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Keychain} config.keychain + * @param {import('libp2p/src/keychain')} config.keychain */ module.exports = ({ keychain }) => { /** - * Remove a key - * - * @example - * ```js - * const key = await ipfs.key.rm('my-key') - * - * console.log(key) - * // { id: 'QmWETF5QvzGnP7jKq5sPDiRjSM2fzwzNsna4wSBEzRzK6W', - * // name: 'my-key' } - * ``` - * - * @param {string} name - The name of the key to remove - * @returns {Promise} - An object that 
describes the removed key + * @type {import('ipfs-core-types/src/key').API["rm"]} */ const rm = (name) => keychain.removeKey(name) return withTimeoutOption(rm) } - -/** - * @typedef {Object} RemovedKey - * @property {string} name - The name of the key - * @property {string} id - The hash of the key - */ diff --git a/packages/ipfs-core/src/components/libp2p.js b/packages/ipfs-core/src/components/libp2p.js index 5073f8e649..236ddaa726 100644 --- a/packages/ipfs-core/src/components/libp2p.js +++ b/packages/ipfs-core/src/components/libp2p.js @@ -5,6 +5,19 @@ const mergeOptions = require('merge-options') const errCode = require('err-code') const PubsubRouters = require('../runtime/libp2p-pubsub-routers-nodejs') +/** + * @typedef {Object} KeychainConfig + * @property {string} [pass] + * + * @typedef {import('ipfs-repo')} Repo + * @typedef {import('multiaddr')} Multiaddr + * @typedef {import('peer-id')} PeerId + * @typedef {import('../types').Options} IPFSOptions + * @typedef {import('libp2p')} LibP2P + * @typedef {import('libp2p').Libp2pOptions & import('libp2p').constructorOptions} Options + * @typedef {import('ipfs-core-types/src/config').Config} IPFSConfig + */ + /** * @param {Object} config * @param {Repo} config.repo @@ -13,7 +26,6 @@ const PubsubRouters = require('../runtime/libp2p-pubsub-routers-nodejs') * @param {Multiaddr[]|undefined} config.multiaddrs * @param {KeychainConfig|undefined} config.keychainConfig * @param {Partial|undefined} config.config - * @returns {LibP2P} */ module.exports = ({ options = {}, @@ -59,10 +71,12 @@ function getLibp2pOptions ({ options, config, datastore, keys, keychainConfig, p const getPubsubRouter = () => { const router = get(config, 'Pubsub.Router') || 'gossipsub' + // @ts-ignore - `router` value is not constrained if (!PubsubRouters[router]) { throw errCode(new Error(`Router unavailable. 
Configure libp2p.modules.pubsub to use the ${router} router.`), 'ERR_NOT_SUPPORTED') } + // @ts-ignore - `router` value is not constrained return PubsubRouters[router] } @@ -163,16 +177,3 @@ function getLibp2pOptions ({ options, config, datastore, keys, keychainConfig, p return libp2pConfig } - -/** - * @typedef {Object} KeychainConfig - * @property {string} [pass] - * - * @typedef {import('.').Repo} Repo - * @typedef {import('.').Multiaddr} Multiaddr - * @typedef {import('.').PeerId} PeerId - * @typedef {import('.').Options} IPFSOptions - * @typedef {import('libp2p')} LibP2P - * @typedef {import('libp2p').Libp2pOptions & import('libp2p').constructorOptions} Options - * @typedef {import('.').IPFSConfig} IPFSConfig - */ diff --git a/packages/ipfs-core/src/components/ls.js b/packages/ipfs-core/src/components/ls.js index c97985d9e0..d19bb5ee21 100644 --- a/packages/ipfs-core/src/components/ls.js +++ b/packages/ipfs-core/src/components/ls.js @@ -1,28 +1,23 @@ 'use strict' -const exporter = require('ipfs-unixfs-exporter') +const { exporter, recursive } = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile } = require('../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @typedef {Object} Context - * @property {import('.').IPLD} ipld - * @property {import('.').Preload} preload + * @property {import('ipld')} ipld + * @property {import('../types').Preload} preload * * @param {Context} context */ module.exports = function ({ ipld, preload }) { /** - * Lists a directory from IPFS that is addressed by a valid IPFS Path. 
- * - * @param {import('ipfs-core-types/src/root').IPFSPath} ipfsPath - * @param {import('ipfs-core-types/src/root').ListOptions} [options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/root').API["ls"]} */ async function * ls (ipfsPath, options = {}) { const path = normalizeCidPath(ipfsPath) - const recursive = options.recursive const pathComponents = path.split('/') if (options.preload !== false) { @@ -31,18 +26,14 @@ module.exports = function ({ ipld, preload }) { const file = await exporter(ipfsPath, ipld, options) - if (!file.unixfs) { - throw errCode(new Error('dag node was not a UnixFS node'), 'ERR_NOT_UNIXFS') - } - - if (file.unixfs.type === 'file') { + if (file.type === 'file') { yield mapFile(file, options) return } - if (file.unixfs.type.includes('dir')) { - if (recursive) { - for await (const child of exporter.recursive(file.cid, ipld, options)) { + if (file.type === 'directory') { + if (options.recursive) { + for await (const child of recursive(file.cid, ipld, options)) { if (file.cid.toBaseEncodedString() === child.cid.toBaseEncodedString()) { continue } @@ -53,17 +44,17 @@ module.exports = function ({ ipld, preload }) { return } - for await (let child of file.content()) { - child = mapFile(child, options) - child.depth-- + for await (const child of file.content()) { + const entry = mapFile(child, options) + entry.depth-- - yield child + yield entry } return } - throw errCode(new Error(`Unknown UnixFS type ${file.unixfs.type}`), 'ERR_UNKNOWN_UNIXFS_TYPE') + throw errCode(new Error(`Unknown UnixFS type ${file.type}`), 'ERR_UNKNOWN_UNIXFS_TYPE') } return withTimeoutOption(ls) diff --git a/packages/ipfs-core/src/components/name/index.js b/packages/ipfs-core/src/components/name/index.js index a8bb3b7faf..58fa79e72f 100644 --- a/packages/ipfs-core/src/components/name/index.js +++ b/packages/ipfs-core/src/components/name/index.js @@ -3,39 +3,23 @@ const createPublishAPI = require('./publish') const createResolveAPI = 
require('./resolve') const PubSubAPI = require('./pubsub') + class NameAPI { /** * @param {Object} config - * @param {IPNS} config.ipns - * @param {PeerId} config.peerId - * @param {Options} config.options - * @param {DagReader} config.dagReader - * @param {IsOnline} config.isOnline - * @param {Keychain} config.keychain - * @param {DNS} config.dns + * @param {import('../ipns')} config.ipns + * @param {import('peer-id')} config.peerId + * @param {import('../../types').Options} config.options + * @param {import('ipld')} config.ipld + * @param {import('ipfs-core-types/src/root').API["isOnline"]} config.isOnline + * @param {import('libp2p/src/keychain')} config.keychain + * @param {import('ipfs-core-types/src/root').API["dns"]} config.dns */ - constructor ({ dns, ipns, dagReader, peerId, isOnline, keychain, options }) { - this.publish = createPublishAPI({ ipns, dagReader, peerId, isOnline, keychain }) + constructor ({ dns, ipns, ipld, peerId, isOnline, keychain, options }) { + this.publish = createPublishAPI({ ipns, ipld, peerId, isOnline, keychain }) this.resolve = createResolveAPI({ dns, ipns, peerId, isOnline, options }) - this.pubsub = new PubSubAPI({ ipns, options: options.EXPERIMENTAL }) + this.pubsub = new PubSubAPI({ ipns, options }) } } -module.exports = NameAPI -/** - * @typedef {ResolveOptions & ExperimentalOptions} Options - * - * @typedef {Object} ExperimentalOptions - * @property {PubSubOptions} [EXPERIMENTAL] - * - * @typedef {import('./pubsub').Options} PubSubOptions - * @typedef {import('./resolve').ResolveOptions} ResolveOptions - * - * @typedef {import('..').IPNS} IPNS - * @typedef {import('..').PeerId} PeerId - * @typedef {import('..').DagReader} DagReader - * @typedef {import('..').Keychain} Keychain - * @typedef {import('..').IsOnline} IsOnline - * @typedef {import('..').DNS} DNS - * @typedef {import('..').AbortOptions} AbortOptions - */ +module.exports = NameAPI diff --git a/packages/ipfs-core/src/components/name/publish.js 
b/packages/ipfs-core/src/components/name/publish.js index 9328debdea..6985e8f013 100644 --- a/packages/ipfs-core/src/components/name/publish.js +++ b/packages/ipfs-core/src/components/name/publish.js @@ -9,21 +9,27 @@ const log = Object.assign(debug('ipfs:name:publish'), { error: debug('ipfs:name:publish:error') }) -const { OFFLINE_ERROR, normalizePath } = require('../../utils') +const { OFFLINE_ERROR } = require('../../utils') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const { resolvePath } = require('./utils') + +/** + * @typedef {import('cids')} CID + */ /** * IPNS - Inter-Planetary Naming System * * @param {Object} config - * @param {import('.').IPNS} config.ipns - * @param {import('.').DagReader} config.dagReader - * @param {import('.').PeerId} config.peerId - * @param {import('.').IsOnline} config.isOnline - * @param {import('.').Keychain} config.keychain + * @param {import('../ipns')} config.ipns + * @param {import('ipld')} config.ipld + * @param {import('peer-id')} config.peerId + * @param {import('ipfs-core-types/src/root').API["isOnline"]} config.isOnline + * @param {import('libp2p/src/keychain')} config.keychain */ -module.exports = ({ ipns, dagReader, peerId, isOnline, keychain }) => { +module.exports = ({ ipns, ipld, peerId, isOnline, keychain }) => { + /** + * @param {string} keyName + */ const lookupKey = async keyName => { if (keyName === 'self') { return peerId.privKey @@ -41,24 +47,7 @@ module.exports = ({ ipns, dagReader, peerId, isOnline, keychain }) => { } /** - * IPNS is a PKI namespace, where names are the hashes of public keys, and - * the private key enables publishing new (signed) values. In both publish - * and resolve, the default name used is the node's own PeerID, - * which is the hash of its public key. - * - * @param {string} value - ipfs path of the object to be published. - * @param {PublishOptions} [options] - * @returns {Promise} - * @example - * ```js - * // The address of your files. 
- * const addr = '/ipfs/QmbezGequPwcsWo8UL4wDF6a8hYwM1hmbzYv2mnKkEWaUp' - * const res = await ipfs.name.publish(addr) - * // You now have a res which contains two fields: - * // - name: the name under which the content was published. - * // - value: the "real" address to which Name points. - * console.log(`https://gateway.ipfs.io/ipns/${res.name}`) - * ``` + * @type {import('ipfs-core-types/src/name').API["publish"]} */ async function publish (value, options = {}) { const resolve = !(options.resolve === false) @@ -71,14 +60,6 @@ module.exports = ({ ipns, dagReader, peerId, isOnline, keychain }) => { // TODO: params related logic should be in the core implementation - // Normalize path value - try { - value = normalizePath(value) - } catch (err) { - log.error(err) - throw err - } - let pubLifetime = 0 try { pubLifetime = parseDuration(lifetime) || 0 @@ -94,35 +75,13 @@ module.exports = ({ ipns, dagReader, peerId, isOnline, keychain }) => { const results = await Promise.all([ // verify if the path exists, if not, an error will stop the execution lookupKey(key), - resolve ? resolvePath({ ipns, dagReader }, value) : Promise.resolve() + // if resolving, do a get so we make sure we have the blocks + resolve ? ipld.get(value, options) : Promise.resolve() ]) // Start publishing process - return ipns.publish(results[0], value, pubLifetime) + return ipns.publish(results[0], value.bytes, pubLifetime) } return withTimeoutOption(publish) } - -/** - * @typedef {PublishSettings & AbortOptions} PublishOptions - * ipfs publish options. - * - * @typedef {Object} PublishSettings - * @property {boolean} [resolve=true] - Resolve given path before publishing. - * @property {string} [lifetime='24h'] - Time duration of the record. - * @property {string} [ttl] - Time duration this record should be cached. - * @property {string} [key=self] - Name of the key to be used. 
- * @property {boolean} [allowOffline=true] - When offline, save the IPNS record - * to the the local datastore without broadcasting to the network instead of - * simply failing. - * - * This option is not yet implemented in js-ipfs. See tracking issue [ipfs/js-ipfs#1997] - * (https://github.com/ipfs/js-ipfs/issues/1997). - * - * @typedef {Object} PublishResult - * @property {string} name - * @property {string} value - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/name/pubsub/cancel.js b/packages/ipfs-core/src/components/name/pubsub/cancel.js index 5d9508c52d..c5debab136 100644 --- a/packages/ipfs-core/src/components/name/pubsub/cancel.js +++ b/packages/ipfs-core/src/components/name/pubsub/cancel.js @@ -5,26 +5,17 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPNS} config.ipns - * @param {import('.').Options} [config.options] + * @param {import('../../ipns')} config.ipns + * @param {import('../../../types').Options} config.options */ -module.exports = ({ ipns, options: routingOptions }) => { +module.exports = ({ ipns, options }) => { + const experimental = options.EXPERIMENTAL + /** - * Cancel a name subscription. - * - * @param {string} name - The name of the subscription to cancel. 
- * @param {import('.').AbortOptions} [options] - * @returns {Promise<{ canceled: boolean }>} - * @example - * ```js - * const name = 'QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm' - * const result = await ipfs.name.pubsub.cancel(name) - * console.log(result.canceled) - * // Logs: true - * ``` + * @type {import('ipfs-core-types/src/name/pubsub').API["cancel"]} */ async function cancel (name, options) { // eslint-disable-line require-await - const pubsub = getPubsubRouting(ipns, routingOptions) + const pubsub = getPubsubRouting(ipns, experimental) return pubsub.cancel(name, options) } diff --git a/packages/ipfs-core/src/components/name/pubsub/index.js b/packages/ipfs-core/src/components/name/pubsub/index.js index 1ea174e082..417ea94d30 100644 --- a/packages/ipfs-core/src/components/name/pubsub/index.js +++ b/packages/ipfs-core/src/components/name/pubsub/index.js @@ -7,8 +7,8 @@ const createSubsAPI = require('./subs') class PubSubAPI { /** * @param {Object} config - * @param {IPNS} config.ipns - * @param {Options} [config.options] + * @param {import('../../ipns')} config.ipns + * @param {import('../../../types').Options} config.options */ constructor ({ ipns, options }) { this.cancel = createCancelAPI({ ipns, options }) @@ -17,9 +17,3 @@ class PubSubAPI { } } module.exports = PubSubAPI - -/** - * @typedef {import('..').IPNS} IPNS - * @typedef {import('./utils').PubSubRoutingOptions} Options - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/name/pubsub/state.js b/packages/ipfs-core/src/components/name/pubsub/state.js index 4794d4abc4..369dbce1a1 100644 --- a/packages/ipfs-core/src/components/name/pubsub/state.js +++ b/packages/ipfs-core/src/components/name/pubsub/state.js @@ -5,24 +5,18 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPNS} config.ipns - * @param {import('.').Options} [config.options] + * @param 
{import('../../ipns')} config.ipns + * @param {import('../../../types').Options} config.options */ -module.exports = ({ ipns, options: routingOptions }) => { +module.exports = ({ ipns, options }) => { + const experimental = options.EXPERIMENTAL + /** - * Query the state of IPNS pubsub. - * - * @param {import('.').AbortOptions} [_options] - * @returns {Promise<{ enabled: boolean }>} - * ```js - * const result = await ipfs.name.pubsub.state() - * console.log(result.enabled) - * // Logs: true - * ``` + * @type {import('ipfs-core-types/src/name/pubsub').API["state"]} */ async function state (_options) { // eslint-disable-line require-await try { - return { enabled: Boolean(getPubsubRouting(ipns, routingOptions)) } + return { enabled: Boolean(getPubsubRouting(ipns, experimental)) } } catch (err) { return { enabled: false } } diff --git a/packages/ipfs-core/src/components/name/pubsub/subs.js b/packages/ipfs-core/src/components/name/pubsub/subs.js index 3c16d72c2e..eeb2cd8b1a 100644 --- a/packages/ipfs-core/src/components/name/pubsub/subs.js +++ b/packages/ipfs-core/src/components/name/pubsub/subs.js @@ -5,24 +5,17 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPNS} config.ipns - * @param {import('.').Options} [config.options] + * @param {import('../../ipns')} config.ipns + * @param {import('../../../types').Options} config.options */ -module.exports = ({ ipns, options: routingOptions }) => { +module.exports = ({ ipns, options }) => { + const experimental = options.EXPERIMENTAL + /** - * Show current name subscriptions. 
- * - * @param {import('.').AbortOptions} [options] - * @returns {Promise} - * @example - * ```js - * const result = await ipfs.name.pubsub.subs() - * console.log(result) - * // Logs: ['/ipns/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm'] - * ``` + * @type {import('ipfs-core-types/src/name/pubsub').API["subs"]} */ async function subs (options) { // eslint-disable-line require-await - const pubsub = getPubsubRouting(ipns, routingOptions) + const pubsub = getPubsubRouting(ipns, experimental) return pubsub.getSubscriptions(options) } diff --git a/packages/ipfs-core/src/components/name/pubsub/utils.js b/packages/ipfs-core/src/components/name/pubsub/utils.js index 43e7e69ae9..e9daacbf52 100644 --- a/packages/ipfs-core/src/components/name/pubsub/utils.js +++ b/packages/ipfs-core/src/components/name/pubsub/utils.js @@ -3,11 +3,16 @@ const IpnsPubsubDatastore = require('../../../ipns/routing/pubsub-datastore') const errcode = require('err-code') +/** + * @typedef {import('../../../types').ExperimentalOptions} ExperimentalOptions + * @property {boolean} [ipnsPubsub] - Enable pub-sub on IPNS. 
(Default: `false`) + */ + /** * Get pubsub from IPNS routing * - * @param {import('.').IPNS} ipns - * @param {PubSubRoutingOptions} [options] + * @param {import('../../ipns')} ipns + * @param {ExperimentalOptions} [options] */ exports.getPubsubRouting = (ipns, options) => { if (!ipns || !(options && options.ipnsPubsub)) { @@ -15,12 +20,12 @@ exports.getPubsubRouting = (ipns, options) => { } // Only one store and it is pubsub - if (IpnsPubsubDatastore.isIpnsPubsubDatastore(ipns.routing)) { + if (ipns.routing instanceof IpnsPubsubDatastore) { return ipns.routing } // Find in tiered - const pubsub = (ipns.routing.stores || []).find(s => IpnsPubsubDatastore.isIpnsPubsubDatastore(s)) + const pubsub = (ipns.routing.stores || []).find(s => s instanceof IpnsPubsubDatastore) if (!pubsub) { throw errcode(new Error('IPNS pubsub datastore not found'), 'ERR_PUBSUB_DATASTORE_NOT_FOUND') @@ -28,8 +33,3 @@ exports.getPubsubRouting = (ipns, options) => { return pubsub } - -/** - * @typedef {Object} PubSubRoutingOptions - * @property {boolean} [ipnsPubsub] - Enable pub-sub on IPNS. 
(Default: `false`) - */ diff --git a/packages/ipfs-core/src/components/name/resolve.js b/packages/ipfs-core/src/components/name/resolve.js index 0f747a4a7c..a1173e5b85 100644 --- a/packages/ipfs-core/src/components/name/resolve.js +++ b/packages/ipfs-core/src/components/name/resolve.js @@ -4,6 +4,7 @@ const debug = require('debug') const errcode = require('err-code') const { mergeOptions } = require('../../utils') const CID = require('cids') +// @ts-ignore no types const isDomain = require('is-domain-name') const log = Object.assign(debug('ipfs:name:resolve'), { @@ -28,29 +29,15 @@ const appendRemainder = (result, remainder) => * IPNS - Inter-Planetary Naming System * * @param {Object} config - * @param {import('.').DNS} config.dns - * @param {import('.').IPNS} config.ipns - * @param {import('.').PeerId} config.peerId - * @param {import('.').IsOnline} config.isOnline - * @param {ResolveOptions} config.options + * @param {import('ipfs-core-types/src/root').API["dns"]} config.dns + * @param {import('../ipns')} config.ipns + * @param {import('peer-id')} config.peerId + * @param {import('ipfs-core-types/src/root').API["isOnline"]} config.isOnline + * @param {import('../../types').Options} config.options */ module.exports = ({ dns, ipns, peerId, isOnline, options: { offline } }) => { /** - * Given a key, query the DHT for its best value. - * - * @param {string} name - ipns name to resolve. Defaults to your node's peerID. - * @param {Options & AbortOptions} [options] - * @returns {AsyncIterable} - * @example - * ```js - * // The IPNS address you want to resolve. 
- * const addr = '/ipns/ipfs.io' - * - * for await (const name of ipfs.name.resolve(addr)) { - * console.log(name) - * } - * // Logs: /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm - * ``` + * @type {import('ipfs-core-types/src/name').API["resolve"]} */ async function * resolve (name, options = {}) { // eslint-disable-line require-await options = mergeOptions({ @@ -98,16 +85,3 @@ module.exports = ({ dns, ipns, peerId, isOnline, options: { offline } }) => { return withTimeoutOption(resolve) } - -/** - * IPFS resolve options. - * - * @typedef {Object} Options - * @property {boolean} [options.nocache=false] - do not use cached entries. - * @property {boolean} [options.recursive=true] - resolve until the result is not an IPNS name. - * - * @typedef {Object} ResolveOptions - * @property {boolean} [offline] - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/name/utils.js b/packages/ipfs-core/src/components/name/utils.js index b2b5970f78..442cc3a576 100644 --- a/packages/ipfs-core/src/components/name/utils.js +++ b/packages/ipfs-core/src/components/name/utils.js @@ -1,27 +1,25 @@ 'use strict' const isIPFS = require('is-ipfs') +const CID = require('cids') /** * resolves the given path by parsing out protocol-specific entries * (e.g. 
/ipns/) and then going through the /ipfs/ entries and returning the final node * * @param {Object} context - * @param {IPNS} context.ipns - * @param {DagReader} context.dagReader + * @param {import('../ipns')} context.ipns + * @param {import('ipld')} context.ipld * @param {string} name */ -exports.resolvePath = ({ ipns, dagReader }, name) => { +exports.resolvePath = ({ ipns, ipld }, name) => { // ipns path if (isIPFS.ipnsPath(name)) { return ipns.resolve(name) } + const cid = new CID(name.substring('/ipfs/'.length)) + // ipfs path - return dagReader.get(name.substring('/ipfs/'.length)) + return ipld.get(cid) } - -/** - * @typedef {import('.').DagReader} DagReader - * @typedef {import('.').IPNS} IPNS - */ diff --git a/packages/ipfs-core/src/components/network.js b/packages/ipfs-core/src/components/network.js index be8111af99..2ad5eb7f59 100644 --- a/packages/ipfs-core/src/components/network.js +++ b/packages/ipfs-core/src/components/network.js @@ -5,11 +5,32 @@ const createLibP2P = require('./libp2p') const Multiaddr = require('multiaddr') const errCode = require('err-code') +/** + * @typedef {Object} Online + * @property {libp2p} libp2p + * @property {Bitswap} bitswap + * + * @typedef {Object} Options + * @property {PeerId} options.peerId + * @property {Repo} options.repo + * @property {Print} options.print + * @property {IPFSOptions} options.options + * + * @typedef {import('ipfs-core-types/src/config').Config} IPFSConfig + * @typedef {import('../types').Options} IPFSOptions + * @typedef {import('ipfs-repo')} Repo + * @typedef {import('../types').Print} Print + * @typedef {import('libp2p')} libp2p + * @typedef {import('ipfs-bitswap')} Bitswap + * @typedef {import('peer-id')} PeerId + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ + class Network { /** * @param {PeerId} peerId - * @param {LibP2P} libp2p - * @param {BitSwap} bitswap + * @param {libp2p} libp2p + * @param {Bitswap} bitswap */ constructor (peerId, libp2p, bitswap) { 
this.peerId = peerId @@ -33,7 +54,9 @@ class Network { options, repo, peerId, + // @ts-ignore - TODO move config types into ipfs-repo multiaddrs: readAddrs(peerId, config), + // @ts-ignore - TODO move config types into ipfs-repo config, keychainConfig: undefined }) @@ -101,23 +124,3 @@ const readAddrs = (peerId, config) => { } const WEBSOCKET_STAR_PROTO_CODE = 479 -/** - * @typedef {Object} Online - * @property {LibP2P} libp2p - * @property {BitSwap} bitswap - * - * @typedef {Object} Options - * @property {PeerId} options.peerId - * @property {Repo} options.repo - * @property {Print} options.print - * @property {IPFSOptions} options.options - * - * @typedef {import('.').IPFSConfig} IPFSConfig - * @typedef {import('.').Options} IPFSOptions - * @typedef {import('.').Repo} Repo - * @typedef {import('.').Print} Print - * @typedef {import('.').LibP2P} LibP2P - * @typedef {import('ipfs-core-types/src/bitswap').Bitswap} BitSwap - * @typedef {import('.').PeerId} PeerId - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/data.js b/packages/ipfs-core/src/components/object/data.js index 3245cc6f14..6ad3d55a32 100644 --- a/packages/ipfs-core/src/components/object/data.js +++ b/packages/ipfs-core/src/components/object/data.js @@ -4,16 +4,14 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload */ module.exports = ({ ipld, preload }) => { const get = require('./get')({ ipld, preload }) /** - * @param {import('.').CID} multihash - * @param {GetOptions & AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/object').API["data"]} */ async function data (multihash, options) { const node = await get(multihash, options) @@ -22,9 +20,3 @@ module.exports = ({ 
ipld, preload }) => { return withTimeoutOption(data) } - -/** - * @typedef {import('cids')} CID - * @typedef {import('./get').GetOptions} GetOptions - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/get.js b/packages/ipfs-core/src/components/object/get.js index b294b3cec9..2080dcb6b2 100644 --- a/packages/ipfs-core/src/components/object/get.js +++ b/packages/ipfs-core/src/components/object/get.js @@ -3,61 +3,29 @@ const CID = require('cids') const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') -const uint8ArrayFromString = require('uint8arrays/from-string') /** * @typedef {import('multibase/src/types').BaseName} BaseName */ -/** - * @param {string|Uint8Array|CID} multihash - * @param {BaseName | 'utf8' | 'utf-8' | 'ascii' | 'base58'} [enc] - * @returns {string|Uint8Array} - */ -function normalizeMultihash (multihash, enc) { - if (typeof multihash === 'string') { - if (enc === 'base58' || !enc) { - return multihash - } - return uint8ArrayFromString(multihash, enc) - } else if (multihash instanceof Uint8Array) { - return multihash - } else if (CID.isCID(multihash)) { - return multihash.bytes - } - throw new Error('unsupported multihash') -} - /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload */ module.exports = ({ ipld, preload }) => { /** - * - * @param {CID} multihash - * @param {GetOptions & AbortOptions} [options] + * @type {import('ipfs-core-types/src/object').API["get"]} */ async function get (multihash, options = {}) { // eslint-disable-line require-await - let mh, cid + let cid try { - mh = normalizeMultihash(multihash, options.enc) - } catch (err) { - throw errCode(err, 'ERR_INVALID_MULTIHASH') - } - - try { - cid = new CID(mh) + cid = new CID(multihash) } catch (err) { 
throw errCode(err, 'ERR_INVALID_CID') } - if (options.cidVersion === 1) { - cid = cid.toV1() - } - if (options.preload !== false) { preload(cid) } @@ -67,12 +35,3 @@ module.exports = ({ ipld, preload }) => { return withTimeoutOption(get) } - -/** - * @typedef {Object} GetOptions - * @property {boolean} [preload] - * @property {number} [cidVersion] - * @property {BaseName | 'utf8' | 'utf-8' | 'ascii' | 'base58'} [enc] - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/index.js b/packages/ipfs-core/src/components/object/index.js index 210ab36f3d..181ce3fd4f 100644 --- a/packages/ipfs-core/src/components/object/index.js +++ b/packages/ipfs-core/src/components/object/index.js @@ -8,18 +8,25 @@ const createPut = require('./put') const createStat = require('./stat') const ObjectPatchAPI = require('./patch') +/** + * @typedef {import('ipld')} IPLD + * @typedef {import('../../types').Preload} Preload + * @typedef {import('../gc-lock').GCLock} GCLock + * @typedef {import('cids')} CID + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ + class ObjectAPI { /** * @param {Object} config * @param {IPLD} config.ipld * @param {Preload} config.preload * @param {GCLock} config.gcLock - * @param {Dag} config.dag */ - constructor ({ ipld, preload, dag, gcLock }) { + constructor ({ ipld, preload, gcLock }) { this.data = createData({ ipld, preload }) this.get = createGet({ ipld, preload }) - this.links = createLinks({ dag }) + this.links = createLinks({ ipld }) this.new = createNew({ ipld, preload }) this.put = createPut({ ipld, preload, gcLock }) this.stat = createStat({ ipld, preload }) @@ -28,12 +35,3 @@ class ObjectAPI { } module.exports = ObjectAPI - -/** - * @typedef {import('..').IPLD} IPLD - * @typedef {import('..').Preload} Preload - * @typedef {import('..').GCLock} GCLock - * @typedef {import('..').Dag} Dag - * @typedef {import('..').CID} CID - * @typedef {import('..').AbortOptions} 
AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/links.js b/packages/ipfs-core/src/components/object/links.js index d0b3739945..dc9b80635b 100644 --- a/packages/ipfs-core/src/components/object/links.js +++ b/packages/ipfs-core/src/components/object/links.js @@ -1,7 +1,8 @@ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGLink = dagPB.DAGLink +const { + DAGLink +} = require('ipld-dag-pb') const CID = require('cids') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') @@ -42,17 +43,15 @@ function findLinks (node, links = []) { /** * @param {Object} config - * @param {import('.').Dag} config.dag + * @param {import('ipld')} config.ipld */ -module.exports = ({ dag }) => { +module.exports = ({ ipld }) => { /** - * @param {CID} multihash - * @param {import('.').AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/object').API["links"]} */ async function links (multihash, options = {}) { const cid = new CID(multihash) - const result = await dag.get(cid, options) + const result = await ipld.get(cid, options) if (cid.codec === 'raw') { return [] diff --git a/packages/ipfs-core/src/components/object/new.js b/packages/ipfs-core/src/components/object/new.js index 00d9199278..2289c100c8 100644 --- a/packages/ipfs-core/src/components/object/new.js +++ b/packages/ipfs-core/src/components/object/new.js @@ -1,28 +1,28 @@ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode +const { + DAGNode +} = require('ipld-dag-pb') const multicodec = require('multicodec') -const Unixfs = require('ipfs-unixfs') +const mh = require('multihashing-async').multihash +const { UnixFS } = require('ipfs-unixfs') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} 
config.preload */ module.exports = ({ ipld, preload }) => { /** - * - * @param {NewOptions & AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/object').API["new"]} */ async function _new (options = {}) { let data if (options.template) { if (options.template === 'unixfs-dir') { - data = (new Unixfs('directory')).marshal() + data = (new UnixFS({ type: 'directory' })).marshal() } else { throw new Error('unknown template') } @@ -34,7 +34,7 @@ module.exports = ({ ipld, preload }) => { const cid = await ipld.put(node, multicodec.DAG_PB, { cidVersion: 0, - hashAlg: multicodec.SHA2_256, + hashAlg: mh.names['sha2-256'], signal: options.signal }) @@ -47,15 +47,3 @@ module.exports = ({ ipld, preload }) => { return withTimeoutOption(_new) } - -/** - * @typedef {Object} NewOptions - * @property {string} [template] - * @property {boolean} [recursive] - * @property {boolean} [nocache] - * @property {boolean} [preload] - * @property {string} [enc] - * - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/patch/add-link.js b/packages/ipfs-core/src/components/object/patch/add-link.js index f46b5e53f4..97815c0aea 100644 --- a/packages/ipfs-core/src/components/object/patch/add-link.js +++ b/packages/ipfs-core/src/components/object/patch/add-link.js @@ -4,14 +4,17 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../../types').Preload} config.preload * @param {import('.').GCLock} config.gcLock */ module.exports = ({ ipld, gcLock, preload }) => { const get = require('../get')({ ipld, preload }) const put = require('../put')({ ipld, gcLock, preload }) + /** + * @type {import('ipfs-core-types/src/object/patch').API["addLink"]} + */ async function addLink 
(multihash, link, options) { const node = await get(multihash, options) node.addLink(link) diff --git a/packages/ipfs-core/src/components/object/patch/append-data.js b/packages/ipfs-core/src/components/object/patch/append-data.js index b2d6b07c0d..998675abf0 100644 --- a/packages/ipfs-core/src/components/object/patch/append-data.js +++ b/packages/ipfs-core/src/components/object/patch/append-data.js @@ -6,13 +6,17 @@ const uint8ArrayConcat = require('uint8arrays/concat') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../../types').Preload} config.preload * @param {import('.').GCLock} config.gcLock */ module.exports = ({ ipld, gcLock, preload }) => { const get = require('../get')({ ipld, preload }) const put = require('../put')({ ipld, gcLock, preload }) + + /** + * @type {import('ipfs-core-types/src/object/patch').API["appendData"]} + */ async function appendData (multihash, data, options) { const node = await get(multihash, options) const newData = uint8ArrayConcat([node.Data, data]) diff --git a/packages/ipfs-core/src/components/object/patch/index.js b/packages/ipfs-core/src/components/object/patch/index.js index 5166c6b33e..d4654def5f 100644 --- a/packages/ipfs-core/src/components/object/patch/index.js +++ b/packages/ipfs-core/src/components/object/patch/index.js @@ -5,6 +5,14 @@ const createAppendData = require('./append-data') const createRmLink = require('./rm-link') const createSetData = require('./set-data') +/** + * @typedef {import('ipld')} IPLD + * @typedef {import('../../../types').Preload} Preload + * @typedef {import('..').GCLock} GCLock + * @typedef {import('cids')} CID + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ + class ObjectPatchAPI { /** * @param {Object} config @@ -20,11 +28,3 @@ class ObjectPatchAPI { } } module.exports = ObjectPatchAPI - -/** - * @typedef {import('..').IPLD} 
IPLD - * @typedef {import('..').Preload} Preload - * @typedef {import('..').GCLock} GCLock - * @typedef {import('..').CID} CID - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/patch/rm-link.js b/packages/ipfs-core/src/components/object/patch/rm-link.js index 3edf78ac4b..b587173e63 100644 --- a/packages/ipfs-core/src/components/object/patch/rm-link.js +++ b/packages/ipfs-core/src/components/object/patch/rm-link.js @@ -4,16 +4,20 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../../types').Preload} config.preload * @param {import('.').GCLock} config.gcLock */ module.exports = ({ ipld, gcLock, preload }) => { const get = require('../get')({ ipld, preload }) const put = require('../put')({ ipld, gcLock, preload }) + /** + * @type {import('ipfs-core-types/src/object/patch').API["rmLink"]} + */ async function rmLink (multihash, linkRef, options) { const node = await get(multihash, options) + // @ts-ignore - loose input types node.rmLink(linkRef.Name || linkRef.name) return put(node, options) } diff --git a/packages/ipfs-core/src/components/object/patch/set-data.js b/packages/ipfs-core/src/components/object/patch/set-data.js index 1777ed2a59..b5118a1991 100644 --- a/packages/ipfs-core/src/components/object/patch/set-data.js +++ b/packages/ipfs-core/src/components/object/patch/set-data.js @@ -5,14 +5,17 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../../types').Preload} config.preload * @param {import('.').GCLock} config.gcLock */ module.exports = ({ ipld, gcLock, preload }) => { const 
get = require('../get')({ ipld, preload }) const put = require('../put')({ ipld, gcLock, preload }) + /** + * @type {import('ipfs-core-types/src/object/patch').API["setData"]} + */ async function setData (multihash, data, options) { const node = await get(multihash, options) return put(new DAGNode(data, node.Links), options) diff --git a/packages/ipfs-core/src/components/object/put.js b/packages/ipfs-core/src/components/object/put.js index 6afbfbde2c..9d92baf8b9 100644 --- a/packages/ipfs-core/src/components/object/put.js +++ b/packages/ipfs-core/src/components/object/put.js @@ -1,14 +1,20 @@ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const DAGLink = dagPB.DAGLink +const { + DAGNode, + DAGLink, + util: DAGLinkUtil +} = require('ipld-dag-pb') const mh = require('multihashing-async').multihash const multicodec = require('multicodec') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') +/** + * @param {Uint8Array} buf + * @param {import('ipfs-core-types/src/object').PutEncoding} encoding + */ function parseBuffer (buf, encoding) { switch (encoding) { case 'json': @@ -16,10 +22,13 @@ function parseBuffer (buf, encoding) { case 'protobuf': return parseProtoBuffer(buf) default: - throw new Error(`unkown encoding: ${encoding}`) + throw new Error(`unknown encoding: ${encoding}`) } } +/** + * @param {Uint8Array} buf + */ function parseJSONBuffer (buf) { let data let links @@ -27,13 +36,19 @@ function parseJSONBuffer (buf) { try { const parsed = JSON.parse(uint8ArrayToString(buf)) + // @ts-ignore - loose input types links = (parsed.Links || []).map((link) => { return new DAGLink( + // @ts-ignore - loose input types link.Name || link.name, + // @ts-ignore - loose input types link.Size || link.size, + // @ts-ignore - loose input types mh.fromB58String(link.Hash || link.hash || 
link.multihash) ) }) + + // @ts-ignore - loose input types data = uint8ArrayFromString(parsed.Data) } catch (err) { throw new Error('failed to parse JSON: ' + err) @@ -42,22 +57,22 @@ function parseJSONBuffer (buf) { return new DAGNode(data, links) } +/** + * @param {Uint8Array} buf + */ function parseProtoBuffer (buf) { - return dagPB.util.deserialize(buf) + return DAGLinkUtil.deserialize(buf) } /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload * @param {import('.').GCLock} config.gcLock */ module.exports = ({ ipld, gcLock, preload }) => { /** - * - * @param {Uint8Array|DAGNode|{ Data: any, links: DAGLink[]}} obj - * @param {PutOptions & AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/object').API["put"]} */ async function put (obj, options = {}) { const encoding = options.enc @@ -69,7 +84,7 @@ module.exports = ({ ipld, gcLock, preload }) => { } else { node = new DAGNode(obj) } - } else if (DAGNode.isDAGNode(obj)) { + } else if (obj instanceof DAGNode) { // already a dag node node = obj } else if (typeof obj === 'object') { @@ -83,7 +98,7 @@ module.exports = ({ ipld, gcLock, preload }) => { try { const cid = await ipld.put(node, multicodec.DAG_PB, { cidVersion: 0, - hashAlg: multicodec.SHA2_256 + hashAlg: mh.names['sha2-256'] }) if (options.preload !== false) { @@ -98,12 +113,3 @@ module.exports = ({ ipld, gcLock, preload }) => { return withTimeoutOption(put) } - -/** - * @typedef {Object} PutOptions - * @property {boolean} [preload] - * @property {string} [enc] - * - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/object/stat.js b/packages/ipfs-core/src/components/object/stat.js index 666967c0be..43c8170e41 100644 --- a/packages/ipfs-core/src/components/object/stat.js +++ 
b/packages/ipfs-core/src/components/object/stat.js @@ -5,18 +5,14 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Preload} config.preload + * @param {import('ipld')} config.ipld + * @param {import('../../types').Preload} config.preload */ module.exports = ({ ipld, preload }) => { const get = require('./get')({ ipld, preload }) /** - * Returns stats about an Object - * - * @param {CID} multihash - * @param {StatOptions & AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/object').API["stat"]} */ async function stat (multihash, options = {}) { const node = await get(multihash, options) @@ -40,16 +36,3 @@ module.exports = ({ ipld, preload }) => { return withTimeoutOption(stat) } -/** - * @typedef {Object} Stat - * @property {string} Hash - * @property {number} NumLinks - * @property {number} BlockSize - * @property {number} LinksSize - * @property {number} DataSize - * @property {number} CumulativeSize - * - * @typedef {import('./get').GetOptions} StatOptions - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/pin/add-all.js b/packages/ipfs-core/src/components/pin/add-all.js index cf6bdf6e69..149dda99c2 100644 --- a/packages/ipfs-core/src/components/pin/add-all.js +++ b/packages/ipfs-core/src/components/pin/add-all.js @@ -5,33 +5,29 @@ const { resolvePath } = require('../../utils') const PinManager = require('./pin-manager') const { PinTypes } = PinManager const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') - -/** @type {(source:Source) => AsyncIterable} */ const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +/** + * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Source} Source + * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Pin} PinTarget + * 
@typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + * @typedef {import('cids')} CID + */ + +/** + * @template T + * @typedef {Iterable|AsyncIterable} AwaitIterable + */ + /** * @param {Object} config - * @param {import('.').GCLock} config.gcLock - * @param {import('.').DagReader} config.dagReader - * @param {import('.').PinManager} config.pinManager + * @param {import('../gc-lock').GCLock} config.gcLock + * @param {import('ipld')} config.ipld + * @param {import('./pin-manager')} config.pinManager */ -module.exports = ({ pinManager, gcLock, dagReader }) => { +module.exports = ({ pinManager, gcLock, ipld }) => { /** - * Adds multiple IPFS objects to the pinset and also stores it to the IPFS - * repo. pinset is the set of hashes currently pinned (not gc'able) - * - * @param {Source} source - One or more CIDs or IPFS Paths to pin in your repo - * @param {AddOptions} [options] - * @returns {AsyncIterable} - CIDs that were pinned. - * @example - * ```js - * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') - * for await (const cid of ipfs.pin.addAll([cid])) { - * console.log(cid) - * } - * // Logs: - * // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') - * ``` + * @type {import('ipfs-core-types/src/pin').API["addAll"]} */ async function * addAll (source, options = {}) { /** @@ -39,7 +35,7 @@ module.exports = ({ pinManager, gcLock, dagReader }) => { */ const pinAdd = async function * () { for await (const { path, recursive, metadata } of normaliseInput(source)) { - const cid = await resolvePath(dagReader, path) + const cid = await resolvePath(ipld, path) // verify that each hash can be pinned const { reason } = await pinManager.isPinnedWithType(cid, [PinTypes.recursive, PinTypes.direct]) @@ -79,22 +75,3 @@ module.exports = ({ pinManager, gcLock, dagReader }) => { return withTimeoutOption(addAll) } - -/** - * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Source} Source - * @typedef 
{import('ipfs-core-utils/src/pins/normalise-input').Pin} PinTarget - * - * @typedef {AddSettings & AbortOptions} AddOptions - * - * @typedef {Object} AddSettings - * @property {boolean} [lock] - * - * @typedef {import('.').AbortOptions} AbortOptions - * - * @typedef {import('.').CID} CID - */ - -/** - * @template T - * @typedef {Iterable|AsyncIterable} AwaitIterable - */ diff --git a/packages/ipfs-core/src/components/pin/add.js b/packages/ipfs-core/src/components/pin/add.js index 8d10ea31f4..842d34d166 100644 --- a/packages/ipfs-core/src/components/pin/add.js +++ b/packages/ipfs-core/src/components/pin/add.js @@ -1,6 +1,7 @@ 'use strict' const last = require('it-last') +const CID = require('cids') /** * @param {Object} config @@ -8,19 +9,23 @@ const last = require('it-last') */ module.exports = ({ addAll }) => /** - * @param {CID|string} path - * @param {AddOptions & AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/pin').API["add"]} */ - async (path, options = {}) => - /** @type {CID} - Need to loosen check here because it could be void */ - (await last(addAll({ path, ...options }, options))) + (path, options = {}) => { + let iter -/** - * @typedef {Object} AddOptions - * @property {boolean} [lock] - * @property {boolean} [recursive] - Recursively pin all links contained by the object - * - * @typedef {import('../../utils').AbortOptions} AbortOptions - * @typedef {import('..').CID} CID - */ + if (CID.isCID(path)) { + iter = addAll([{ + cid: path, + ...options + }], options) + } else { + iter = addAll([{ + path: path.toString(), + ...options + }], options) + } + + // @ts-ignore return value of last can be undefined + return last(iter) + } diff --git a/packages/ipfs-core/src/components/pin/index.js b/packages/ipfs-core/src/components/pin/index.js index 2e3ee7c66b..468cab5d48 100644 --- a/packages/ipfs-core/src/components/pin/index.js +++ b/packages/ipfs-core/src/components/pin/index.js @@ -6,30 +6,41 @@ const createLs = 
require('./ls') const createRm = require('./rm') const createRmAll = require('./rm-all') +/** + * @typedef {import('../gc-lock').GCLock} GCLock + * @typedef {import('./pin-manager')} PinManager + */ + class PinAPI { /** * @param {Object} config * @param {GCLock} config.gcLock - * @param {DagReader} config.dagReader + * @param {import('ipld')} config.ipld * @param {PinManager} config.pinManager */ - constructor ({ gcLock, dagReader, pinManager }) { - const addAll = createAddAll({ gcLock, dagReader, pinManager }) + constructor ({ gcLock, ipld, pinManager }) { + const addAll = createAddAll({ gcLock, ipld, pinManager }) this.addAll = addAll this.add = createAdd({ addAll }) - const rmAll = createRmAll({ gcLock, dagReader, pinManager }) + const rmAll = createRmAll({ gcLock, ipld, pinManager }) this.rmAll = rmAll this.rm = createRm({ rmAll }) - this.ls = createLs({ dagReader, pinManager }) + this.ls = createLs({ ipld, pinManager }) + + const notImplemented = () => Promise.reject(new Error('Not implemented')) + + this.remote = { + add: notImplemented, + ls: notImplemented, + rm: notImplemented, + rmAll: notImplemented, + service: { + add: notImplemented, + rm: notImplemented, + ls: notImplemented + } + } } } -module.exports = PinAPI -/** - * @typedef {import('..').Repo} Repo - * @typedef {import('..').GCLock} GCLock - * @typedef {import('..').DagReader} DagReader - * @typedef {import('..').PinManager} PinManager - * @typedef {import('..').AbortOptions} AbortOptions - * @typedef {import('..').CID} CID - */ +module.exports = PinAPI diff --git a/packages/ipfs-core/src/components/pin/ls.js b/packages/ipfs-core/src/components/pin/ls.js index 3fb486288a..b06b255fcc 100644 --- a/packages/ipfs-core/src/components/pin/ls.js +++ b/packages/ipfs-core/src/components/pin/ls.js @@ -7,7 +7,17 @@ const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') const { resolvePath } = require('../../utils') const withTimeoutOption = 
require('ipfs-core-utils/src/with-timeout-option') +/** + * @typedef {import('cids')} CID + */ + +/** + * @param {string} type + * @param {CID} cid + * @param {Record} [metadata] + */ function toPin (type, cid, metadata) { + /** @type {import('ipfs-core-types/src/pin').LsResult} */ const output = { type, cid @@ -22,51 +32,21 @@ function toPin (type, cid, metadata) { /** * @param {Object} config - * @param {import('.').PinManager} config.pinManager - * @param {import('.').DagReader} config.dagReader + * @param {import('./pin-manager')} config.pinManager + * @param {import('ipld')} config.ipld */ -module.exports = ({ pinManager, dagReader }) => { +module.exports = ({ pinManager, ipld }) => { /** - * List all the objects pinned to local storage - * - * @param {LsOptions} [options] - * @returns {AsyncIterable} - * @example - * ```js - * for await (const { cid, type } of ipfs.pin.ls()) { - * console.log({ cid, type }) - * } - * // { cid: CID(Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E), type: 'recursive' } - * // { cid: CID(QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ), type: 'indirect' } - * // { cid: CID(QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R), type: 'indirect' } - * - * const paths = [ - * CID.from('Qmc5..'), - * CID.from('QmZb..'), - * CID.from('QmSo..') - * ] - * for await (const { cid, type } of ipfs.pin.ls({ paths })) { - * console.log({ cid, type }) - * } - * // { cid: CID(Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E), type: 'recursive' } - * // { cid: CID(QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ), type: 'indirect' } - * // { cid: CID(QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R), type: 'indirect' } - * ``` + * @type {import('ipfs-core-types/src/pin').API["ls"]} */ async function * ls (options = {}) { - /** @type {PinQueryType} */ + /** @type {import('ipfs-core-types/src/pin').PinQueryType} */ let type = PinTypes.all if (options.type) { type = options.type - if (typeof options.type === 'string') { - // @ts-ignore - Can't infer that string 
returned by toLowerCase() is PinQueryType - type = options.type.toLowerCase() - } PinManager.checkPinType(type) - } else { - options.type = PinTypes.all } if (options.paths) { @@ -74,7 +54,7 @@ module.exports = ({ pinManager, dagReader }) => { let matched = false for await (const { path } of normaliseInput(options.paths)) { - const cid = await resolvePath(dagReader, path) + const cid = await resolvePath(ipld, path) const { reason, pinned, parent, metadata } = await pinManager.isPinnedWithType(cid, type) if (!pinned) { @@ -123,21 +103,3 @@ module.exports = ({ pinManager, dagReader }) => { return withTimeoutOption(ls) } - -/** - * @typedef {LsSettings & AbortOptions} LsOptions - * - * @typedef {Object} LsSettings - * @property {string[]|CID[]} [paths] - CIDs or IPFS paths to search for in the pinset. - * @property {PinQueryType} [type] - Filter by this type of pin ("recursive", "direct" or "indirect") - * - * @typedef {Object} LsEntry - * @property {CID} cid - CID of the pinned node - * @property {PinType} type - Pin type ("recursive", "direct" or "indirect") - * - * @typedef {import('./pin-manager').PinType} PinType - * @typedef {import('./pin-manager').PinQueryType} PinQueryType - * - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('.').CID} CID - */ diff --git a/packages/ipfs-core/src/components/pin/pin-manager.js b/packages/ipfs-core/src/components/pin/pin-manager.js index e284831ec2..c91b3d668a 100644 --- a/packages/ipfs-core/src/components/pin/pin-manager.js +++ b/packages/ipfs-core/src/components/pin/pin-manager.js @@ -3,20 +3,47 @@ const CID = require('cids') const errCode = require('err-code') +// @ts-ignore const dagCborLinks = require('dag-cbor-links') const debug = require('debug') // const parallelBatch = require('it-parallel-batch') const first = require('it-first') const all = require('it-all') -const cbor = require('cbor') +// @ts-ignore +// TODO: cborg has no main in package.json +const cborg = require('cborg') const multibase 
= require('multibase') const multicodec = require('multicodec') +const { Key } = require('interface-datastore') + +/** + * @typedef {object} Pin + * @property {number} depth + * @property {CID.CIDVersion} [version] + * @property {multicodec.CodecCode} [codec] + * @property {Record} [metadata] + */ + +/** + * @typedef {import('ipfs-core-types/src/pin').PinType} PinType + * @typedef {import('ipfs-core-types/src/pin').PinQueryType} PinQueryType + */ + +/** + * @typedef {Object} PinOptions + * @property {any} [metadata] + * + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ // arbitrary limit to the number of concurrent dag operations // const WALK_DAG_CONCURRENCY_LIMIT = 300 // const IS_PINNED_WITH_TYPE_CONCURRENCY_LIMIT = 300 // const PIN_DS_KEY = new Key('/local/pins') +/** + * @param {string} type + */ function invalidPinTypeErr (type) { const errMsg = `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` return errCode(new Error(errMsg), 'ERR_INVALID_PIN_TYPE') @@ -24,19 +51,20 @@ function invalidPinTypeErr (type) { const encoder = multibase.encoding('base32upper') +/** + * @param {CID} cid + */ function cidToKey (cid) { - return `/${encoder.encode(cid.multihash)}` + return new Key(`/${encoder.encode(cid.multihash)}`) } +/** + * @param {Key | string} key + */ function keyToMultihash (key) { return encoder.decode(key.toString().slice(1)) } -/** - * @typedef {'direct'|'recursive'|'indirect'} PinType - * @typedef {PinType|'all'} PinQueryType - */ - const PinTypes = { /** @type {'direct'} */ direct: ('direct'), @@ -51,12 +79,12 @@ const PinTypes = { class PinManager { /** * @param {Object} config - * @param {import('.').Repo} config.repo - * @param {import('.').DagReader} config.dagReader + * @param {import('ipfs-repo')} config.repo + * @param {import('ipld')} config.ipld */ - constructor ({ repo, dagReader }) { + constructor ({ repo, ipld }) { this.repo = repo - this.dag = dagReader + this.ipld = ipld this.log = 
debug('ipfs:pin') this.directPins = new Set() this.recursivePins = new Set() @@ -65,21 +93,22 @@ class PinManager { /** * @private * @param {CID} cid - * @param {Object} options - * @param {boolean} [options.preload] + * @param {AbortOptions} [options] + * @returns {AsyncGenerator} */ - async * _walkDag (cid, { preload = false }) { - const { value: node } = await this.dag.get(cid, { preload }) + async * _walkDag (cid, options) { + const node = await this.ipld.get(cid, options) if (cid.codec === 'dag-pb') { + // @ts-ignore for (const link of node.Links) { yield link.Hash - yield * this._walkDag(link.Hash, { preload }) + yield * this._walkDag(link.Hash, options) } } else if (cid.codec === 'dag-cbor') { for (const [, childCid] of dagCborLinks(node)) { yield childCid - yield * this._walkDag(childCid, { preload }) + yield * this._walkDag(childCid, options) } } } @@ -90,8 +119,9 @@ class PinManager { * @returns {Promise} */ async pinDirectly (cid, options = {}) { - await this.dag.get(cid, options) + await this.ipld.get(cid, options) + /** @type {Pin} */ const pin = { depth: 0 } @@ -108,7 +138,7 @@ class PinManager { pin.metadata = options.metadata } - return this.repo.pins.put(cidToKey(cid), cbor.encode(pin)) + return this.repo.pins.put(cidToKey(cid), cborg.encode(pin)) } /** @@ -123,12 +153,13 @@ class PinManager { /** * @param {CID} cid - * @param {PreloadOptions & PinOptions & AbortOptions} [options] + * @param {PinOptions & AbortOptions} [options] * @returns {Promise} */ async pinRecursively (cid, options = {}) { await this.fetchCompleteDag(cid, options) + /** @type {Pin} */ const pin = { depth: Infinity } @@ -145,22 +176,21 @@ class PinManager { pin.metadata = options.metadata } - await this.repo.pins.put(cidToKey(cid), cbor.encode(pin)) + await this.repo.pins.put(cidToKey(cid), cborg.encode(pin)) } /** * @param {AbortOptions} [options] - * @returns {AsyncIterable<{ cid: CID, metadata: any }>} */ async * directKeys (options) { for await (const entry of 
this.repo.pins.query({ filters: [(entry) => { - const pin = cbor.decode(entry.value) + const pin = cborg.decode(entry.value) return pin.depth === 0 }] })) { - const pin = cbor.decode(entry.value) + const pin = cborg.decode(entry.value) const version = pin.version || 0 const codec = pin.codec ? multicodec.getName(pin.codec) : 'dag-pb' const multihash = keyToMultihash(entry.key) @@ -174,17 +204,16 @@ class PinManager { /** * @param {AbortOptions} [options] - * @returns {AsyncIterable<{ cid: CID, metadata: any }>} */ async * recursiveKeys (options) { for await (const entry of this.repo.pins.query({ filters: [(entry) => { - const pin = cbor.decode(entry.value) + const pin = cborg.decode(entry.value) return pin.depth === Infinity }] })) { - const pin = cbor.decode(entry.value) + const pin = cborg.decode(entry.value) const version = pin.version || 0 const codec = pin.codec ? multicodec.getName(pin.codec) : 'dag-pb' const multihash = keyToMultihash(entry.key) @@ -197,12 +226,11 @@ class PinManager { } /** - * @param {Object} options - * @param {boolean} [options.preload] + * @param {AbortOptions} [options] */ - async * indirectKeys ({ preload }) { + async * indirectKeys (options) { for await (const { cid } of this.recursiveKeys()) { - for await (const childCid of this._walkDag(cid, { preload })) { + for await (const childCid of this._walkDag(cid, options)) { // recursive pins override indirect pins const types = [ PinTypes.recursive @@ -236,13 +264,13 @@ class PinManager { if (recursive || direct || all) { const result = await first(this.repo.pins.query({ - prefix: cidToKey(cid), + prefix: cidToKey(cid).toString(), filters: [entry => { if (all) { return true } - const pin = cbor.decode(entry.value) + const pin = cborg.decode(entry.value) return types.includes(pin.depth === 0 ? 
PinTypes.direct : PinTypes.recursive) }], @@ -250,7 +278,7 @@ class PinManager { })) if (result) { - const pin = cbor.decode(result.value) + const pin = cborg.decode(result.value) return { cid, @@ -263,9 +291,13 @@ class PinManager { const self = this + /** + * @param {CID} key + * @param {AsyncIterable<{ cid: CID, metadata: any }>} source + */ async function * findChild (key, source) { for await (const { cid: parentCid } of source) { - for await (const childCid of self._walkDag(parentCid, { preload: false })) { + for await (const childCid of self._walkDag(parentCid)) { if (childCid.equals(key)) { yield parentCid return @@ -298,10 +330,10 @@ class PinManager { /** * @param {CID} cid - * @param {PreloadOptions & AbortOptions} options + * @param {AbortOptions} options */ async fetchCompleteDag (cid, options) { - await all(this._walkDag(cid, { preload: options.preload })) + await all(this._walkDag(cid, options)) } /** @@ -321,13 +353,3 @@ class PinManager { PinManager.PinTypes = PinTypes module.exports = PinManager - -/** - * @typedef {Object} PinOptions - * @property {any} [metadata] - * - * @typedef {Object} PreloadOptions - * @property {boolean} [preload] - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/pin/rm-all.js b/packages/ipfs-core/src/components/pin/rm-all.js index 2157acd407..9e84fb4274 100644 --- a/packages/ipfs-core/src/components/pin/rm-all.js +++ b/packages/ipfs-core/src/components/pin/rm-all.js @@ -7,28 +7,13 @@ const { PinTypes } = require('./pin-manager') /** * @param {Object} config - * @param {import('.').PinManager} config.pinManager + * @param {import('./pin-manager')} config.pinManager * @param {import('.').GCLock} config.gcLock - * @param {import('.').DagReader} config.dagReader + * @param {import('ipld')} config.ipld */ -module.exports = ({ pinManager, gcLock, dagReader }) => { +module.exports = ({ pinManager, gcLock, ipld }) => { /** - * Unpin one or more blocks from your repo - * - 
* @param {Source} source - Unpin all pins from the source - * @param {AbortOptions} [_options] - * @returns {AsyncIterable} - * @example - * ```js - * const source = [ - * CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') - * ] - * for await (const cid of ipfs.pin.rmAll(source)) { - * console.log(cid) - * } - * // prints the CIDs that were unpinned - * // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') - * ``` + * @type {import('ipfs-core-types/src/pin').API["rmAll"]} */ async function * rmAll (source, _options = {}) { const release = await gcLock.readLock() @@ -36,7 +21,7 @@ module.exports = ({ pinManager, gcLock, dagReader }) => { try { // verify that each hash can be unpinned for await (const { path, recursive } of normaliseInput(source)) { - const cid = await resolvePath(dagReader, path) + const cid = await resolvePath(ipld, path) const { pinned, reason } = await pinManager.isPinnedWithType(cid, PinTypes.all) if (!pinned) { @@ -71,9 +56,3 @@ module.exports = ({ pinManager, gcLock, dagReader }) => { return withTimeoutOption(rmAll) } - -/** - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - * @typedef {import('./add-all').Source} Source - */ diff --git a/packages/ipfs-core/src/components/pin/rm.js b/packages/ipfs-core/src/components/pin/rm.js index dbb9c47317..ab939b00e4 100644 --- a/packages/ipfs-core/src/components/pin/rm.js +++ b/packages/ipfs-core/src/components/pin/rm.js @@ -4,34 +4,13 @@ const last = require('it-last') /** * @param {Object} config - * @param {ReturnType} config.rmAll + * @param {import('ipfs-core-types/src/pin').API["rmAll"]} config.rmAll */ module.exports = ({ rmAll }) => /** - * Unpin this block from your repo - * - * @param {string|CID} path - CID or IPFS Path to unpin. 
- * @param {RmOptions} [options] - * @returns {Promise} - The CIDs that was unpinned - * @example - * ```js - * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') - * const result = await ipfs.pin.rm(cid) - * console.log(result) - * // prints the CID that was unpinned - * // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') - * ``` + * @type {import('ipfs-core-types/src/pin').API["rm"]} */ - async (path, options) => - /** @type {CID} - Need to loosen check here because it could be void */ - (await last(rmAll({ path, ...options }, options))) - -/** - * @typedef {RmSettings & AbortOptions} RmOptions - * - * @typedef {Object} RmSettings - * @property {boolean} [recursive=true] - Recursively unpin the object linked - * - * @typedef {import('..').CID} CID - * @typedef {import('../../utils').AbortOptions} AbortOptions - */ + (path, options) => { + // @ts-ignore return value of last can be undefined + return last(rmAll([{ path, ...options }], options)) + } diff --git a/packages/ipfs-core/src/components/ping.js b/packages/ipfs-core/src/components/ping.js index 295263085d..d678616288 100644 --- a/packages/ipfs-core/src/components/ping.js +++ b/packages/ipfs-core/src/components/ping.js @@ -7,25 +7,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Send echo request packets to IPFS hosts. 
- * - * @param {PeerId} peerId - The remote peer to send packets to - * @param {PingOptions} [options] - * @returns {AsyncIterable} - * @example - * ```js - * for await (const res of ipfs.ping('Qmhash')) { - * if (res.time) { - * console.log(`Pong received: time=${res.time} ms`) - * } else { - * console.log(res.text) - * } - * } - * ``` + * @type {import('ipfs-core-types/src/root').API["ping"]} */ async function * ping (peerId, options = {}) { const { libp2p } = await network.use() @@ -100,5 +86,5 @@ module.exports = ({ network }) => { * @typedef {Object} PingSettings * @property {number} [count=10] - The number of ping messages to send * - * @typedef {import('.').AbortOptions} AbortOptions + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions */ diff --git a/packages/ipfs-core/src/components/pubsub.js b/packages/ipfs-core/src/components/pubsub.js index 56b659477a..2592425024 100644 --- a/packages/ipfs-core/src/components/pubsub.js +++ b/packages/ipfs-core/src/components/pubsub.js @@ -7,11 +7,11 @@ const get = require('dlv') /** * @param {Object} config - * @param {import('.').NetworkService} config.network - * @param {import('.').IPFSConfig} [config.config] + * @param {import('../types').NetworkService} config.network + * @param {import('ipfs-core-types/src/config').Config} [config.config] */ module.exports = ({ network, config }) => { - const isEnabled = get(config, 'Pubsub.Enabled', true) + const isEnabled = get(config || {}, 'Pubsub.Enabled', true) return { subscribe: isEnabled ? withTimeoutOption(subscribe) : notEnabled, @@ -22,22 +22,7 @@ module.exports = ({ network, config }) => { } /** - * Subscribe to a pubsub topic. 
- * - * @example - * ```js - * const topic = 'fruit-of-the-day' - * const receiveMsg = (msg) => console.log(msg.data.toString()) - * - * await ipfs.pubsub.subscribe(topic, receiveMsg) - * console.log(`subscribed to ${topic}`) - * ``` - * - * @param {string} topic - The topic name - * @param {(message:Message) => void} handler - Event handler which will be - * called with a message object everytime one is received. - * @param {AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/pubsub').API["subscribe"]} */ async function subscribe (topic, handler, options) { const { libp2p } = await network.use(options) @@ -46,33 +31,7 @@ module.exports = ({ network, config }) => { } /** - * Unsubscribes from a pubsub topic. - * - * @example - * ```js - * const topic = 'fruit-of-the-day' - * const receiveMsg = (msg) => console.log(msg.toString()) - * - * await ipfs.pubsub.subscribe(topic, receiveMsg) - * console.log(`subscribed to ${topic}`) - * - * await ipfs.pubsub.unsubscribe(topic, receiveMsg) - * console.log(`unsubscribed from ${topic}`) - * - * // Or removing all listeners: - * - * const topic = 'fruit-of-the-day' - * const receiveMsg = (msg) => console.log(msg.toString()) - * await ipfs.pubsub.subscribe(topic, receiveMsg); - * // Will unsubscribe ALL handlers for the given topic - * await ipfs.pubsub.unsubscribe(topic); - * ``` - * - * @param {string} topic - The topic to unsubscribe from - * @param {(message:Message) => void} [handler] - The handler to remove. If - * not provided unsubscribes al handlers for the topic. - * @param {AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/pubsub').API["unsubscribe"]} */ async function unsubscribe (topic, handler, options) { const { libp2p } = await network.use(options) @@ -81,22 +40,7 @@ module.exports = ({ network, config }) => { } /** - * Publish a data message to a pubsub topic. 
- * - * @example - * ```js - * const topic = 'fruit-of-the-day' - * const msg = new TextEncoder().encode('banana') - * - * await ipfs.pubsub.publish(topic, msg) - * // msg was broadcasted - * console.log(`published to ${topic}`) - * ``` - * - * @param {string} topic - * @param {Uint8Array} data - * @param {AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/pubsub').API["publish"]} */ async function publish (topic, data, options) { const { libp2p } = await network.use(options) @@ -105,11 +49,9 @@ module.exports = ({ network, config }) => { } await libp2p.pubsub.publish(topic, data) } + /** - * Returns the list of subscriptions the peer is subscribed to. - * - * @param {AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/pubsub').API["ls"]} */ async function ls (options) { const { libp2p } = await network.use(options) @@ -117,19 +59,7 @@ module.exports = ({ network, config }) => { } /** - * Returns the peers that are subscribed to one topic. 
- * - * @example - * ```js - * const topic = 'fruit-of-the-day' - * - * const peerIds = await ipfs.pubsub.peers(topic) - * console.log(peerIds) - * ``` - * - * @param {string} topic - * @param {AbortOptions} [options] - * @returns {Promise} - An array of peer IDs subscribed to the topic + * @type {import('ipfs-core-types/src/pubsub').API["peers"]} */ async function peers (topic, options) { const { libp2p } = await network.use(options) @@ -140,13 +70,3 @@ module.exports = ({ network, config }) => { const notEnabled = async () => { // eslint-disable-line require-await throw new NotEnabledError('pubsub not enabled') } - -/** - * @typedef {Object} Message - * @property {string} from - * @property {Uint8Array} seqno - * @property {Uint8Array} data - * @property {string[]} topicIDs - * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/refs/index.js b/packages/ipfs-core/src/components/refs/index.js index 438532a74d..db4018883e 100644 --- a/packages/ipfs-core/src/components/refs/index.js +++ b/packages/ipfs-core/src/components/refs/index.js @@ -1,31 +1,37 @@ 'use strict' -const isIpfs = require('is-ipfs') const CID = require('cids') const { DAGNode } = require('ipld-dag-pb') -const { normalizeCidPath } = require('../../utils') const { Errors } = require('interface-datastore') const ERR_NOT_FOUND = Errors.notFoundError().code const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') +const toCIDAndPath = require('ipfs-core-utils/src/to-cid-and-path') const Format = { default: '', edges: ' -> ' } +/** + * @typedef {object} Node + * @property {string} [name] + * @property {CID} cid + * + * @typedef {object} TraversalResult + * @property {Node} parent + * @property {Node} node + * @property {boolean} isDuplicate + */ + /** * @param {Object} config - * @param {import('..').IPLD} config.ipld - * @param {import('..').Resolve} config.resolve - * @param {import('..').Preload} config.preload + * @param 
{import('ipld')} config.ipld + * @param {import('ipfs-core-types/src/root').API["resolve"]} config.resolve + * @param {import('../../types').Preload} config.preload */ module.exports = function ({ ipld, resolve, preload }) { /** - * Get links (references) from an object - * - * @param {CID|string} ipfsPath - The object to search for references - * @param {RefsOptions & AbortOptions} [options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/refs').API["refs"]} */ async function * refs (ipfsPath, options = {}) { if (options.maxDepth === 0) { @@ -36,7 +42,7 @@ module.exports = function ({ ipld, resolve, preload }) { throw new Error('Cannot set edges to true and also specify format') } - options.format = options.edges ? Format.edges : options.format || Format.default + options.format = options.edges ? Format.edges : options.format if (typeof options.maxDepth !== 'number') { options.maxDepth = options.recursive ? Infinity : 1 @@ -57,34 +63,44 @@ module.exports = function ({ ipld, resolve, preload }) { module.exports.Format = Format +/** + * @param {import('../../types').Preload} preload + * @param {string | CID} ipfsPath + * @param {import('ipfs-core-types/src/refs').RefsOptions} options + */ function getFullPath (preload, ipfsPath, options) { - // normalizeCidPath() strips /ipfs/ off the front of the path so the CID will - // be at the front of the path - const path = normalizeCidPath(ipfsPath) - const pathComponents = path.split('/') - const cid = pathComponents[0] - - if (!isIpfs.cid(cid)) { - throw new Error(`Error resolving path '${path}': '${cid}' is not a valid CID`) - } + const { + cid, + path + } = toCIDAndPath(ipfsPath) if (options.preload !== false) { preload(cid) } - return '/ipfs/' + path + return `/ipfs/${cid}${path || ''}` } -// Get a stream of refs at the given path +/** + * Get a stream of refs at the given path + * + * @param {import('ipfs-core-types/src/root').API["resolve"]} resolve + * @param {import('ipld')} ipld + * @param 
{string} path + * @param {import('ipfs-core-types/src/refs').RefsOptions} options + */ async function * refsStream (resolve, ipld, path, options) { // Resolve to the target CID of the path const resPath = await resolve(path) - // path is /ipfs/ - const parts = resPath.split('/') - const cid = parts[2] + const { + cid + } = toCIDAndPath(resPath) + + const maxDepth = options.maxDepth != null ? options.maxDepth : Infinity + const unique = options.unique || false // Traverse the DAG, converting it into a stream - for await (const obj of objectStream(ipld, cid, options.maxDepth, options.unique)) { + for await (const obj of objectStream(ipld, cid, maxDepth, unique)) { // Root object will not have a parent if (!obj.parent) { continue @@ -103,18 +119,37 @@ async function * refsStream (resolve, ipld, path, options) { } } -// Get formatted link -function formatLink (srcCid, dstCid, linkName, format) { +/** + * Get formatted link + * + * @param {CID} srcCid + * @param {CID} dstCid + * @param {string} [linkName] + * @param {string} [format] + */ +function formatLink (srcCid, dstCid, linkName = '', format = Format.default) { let out = format.replace(//g, srcCid.toString()) out = out.replace(//g, dstCid.toString()) out = out.replace(//g, linkName) return out } -// Do a depth first search of the DAG, starting from the given root cid +/** + * Do a depth first search of the DAG, starting from the given root cid + * + * @param {import('ipld')} ipld + * @param {CID} rootCid + * @param {number} maxDepth + * @param {boolean} uniqueOnly + */ async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint-disable-line require-await const seen = new Set() + /** + * @param {Node} parent + * @param {number} depth + * @returns {AsyncGenerator} + */ async function * traverseLevel (parent, depth) { const nextLevelDepth = depth + 1 @@ -151,19 +186,35 @@ async function * objectStream (ipld, rootCid, maxDepth, uniqueOnly) { // eslint- yield * traverseLevel({ cid: rootCid }, 0) } 
-// Fetch a node from IPLD then get all its links +/** + * Fetch a node from IPLD then get all its links + * + * @param {import('ipld')} ipld + * @param {CID} cid + */ async function getLinks (ipld, cid) { - const node = await ipld.get(new CID(cid)) - - if (DAGNode.isDAGNode(node)) { - return node.Links.map(({ Name, Hash }) => ({ name: Name, cid: new CID(Hash) })) + const node = await ipld.get(cid) + + if (node instanceof DAGNode) { + /** + * @param {import('ipld-dag-pb').DAGLink} arg + */ + const mapper = ({ Name, Hash }) => ({ name: Name, cid: Hash }) + return node.Links.map(mapper) } return getNodeLinks(node) } -// Recursively search the node for CIDs +/** + * Recursively search the node for CIDs + * + * @param {object} node + * @param {string} [path] + * @returns {Node[]} + */ function getNodeLinks (node, path = '') { + /** @type {Node[]} */ let links = [] for (const [name, value] of Object.entries(node)) { if (CID.isCID(value)) { @@ -177,17 +228,3 @@ function getNodeLinks (node, path = '') { } return links } - -/** - * @typedef {Object} RefsOptions - * @property {boolean} [recursive=false] - Recursively list references of child nodes - * @property {boolean} [unique=false] - Omit duplicate references from output - * @property {string} [format=''] - Output edges with given format. 
Available tokens: ``, ``, `` - * @property {boolean} [edges=false] - output references in edge format: `" -> "` - * @property {number} [maxDepth=1] - only for recursive refs, limits fetch and listing to the given depth - * - * @typedef {{ref:string, err?:null}|{ref?:undefined, err:Error}} RefResult - * - * @typedef {import('..').AbortOptions} AbortOptions - * @typedef {import('..').Repo} Repo - */ diff --git a/packages/ipfs-core/src/components/refs/local.js b/packages/ipfs-core/src/components/refs/local.js index 0615b05a2d..e8a87bdd63 100644 --- a/packages/ipfs-core/src/components/refs/local.js +++ b/packages/ipfs-core/src/components/refs/local.js @@ -4,12 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = function ({ repo }) { /** - * @param {import('.').AbortOptions} [options] - * @returns {AsyncIterable<{ref: string}>} + * @type {import('ipfs-core-types/src/refs').API["local"]} */ async function * refsLocal (options = {}) { // @ts-ignore - TS is not aware of keysOnly diff --git a/packages/ipfs-core/src/components/repo/gc.js b/packages/ipfs-core/src/components/repo/gc.js index 327f156bea..206091da7e 100644 --- a/packages/ipfs-core/src/components/repo/gc.js +++ b/packages/ipfs-core/src/components/repo/gc.js @@ -12,19 +12,25 @@ const multibase = require('multibase') // Limit on the number of parallel block remove operations const BLOCK_RM_CONCURRENCY = 256 +/** + * @typedef {import('ipfs-core-types/src/pin').API} PinAPI + * @typedef {import('ipfs-core-types/src/refs').API} RefsAPI + * @typedef {import('ipfs-repo')} IPFSRepo + * @typedef {import('interface-datastore').Key} Key + */ + /** * Perform mark and sweep garbage collection * * @param {Object} config - * @param {import('.').GCLock} config.gcLock - * @param {import('.').Pin} config.pin - * @param {import('.').Refs} config.refs - * @param 
{import('.').Repo} config.repo + * @param {import('../gc-lock').GCLock} config.gcLock + * @param {PinAPI} config.pin + * @param {RefsAPI["refs"]} config.refs + * @param {IPFSRepo} config.repo */ module.exports = ({ gcLock, pin, refs, repo }) => { /** - * @param {AbortOptions} [_options] - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/repo').API["gc"]} */ async function * gc (_options = {}) { const start = Date.now() @@ -51,7 +57,14 @@ module.exports = ({ gcLock, pin, refs, repo }) => { return withTimeoutOption(gc) } -// Get Set of CIDs of blocks to keep +/** + * Get Set of CIDs of blocks to keep + * + * @param {object} arg + * @param {PinAPI} arg.pin + * @param {RefsAPI["refs"]} arg.refs + * @param {IPFSRepo} arg.repo + */ async function createMarkedSet ({ pin, refs, repo }) { const pinsSource = map(({ cid }) => cid, pin.ls()) @@ -82,29 +95,43 @@ async function createMarkedSet ({ pin, refs, repo }) { return output } -// Delete all blocks that are not marked as in use +/** + * Delete all blocks that are not marked as in use + * + * @param {object} arg + * @param {IPFSRepo} arg.repo + * @param {Set} markedSet + * @param {AsyncIterable} blockKeys + */ async function * deleteUnmarkedBlocks ({ repo }, markedSet, blockKeys) { // Iterate through all blocks and find those that are not in the marked set // blockKeys yields { key: Key() } let blocksCount = 0 let removedBlocksCount = 0 + /** + * @param {CID} cid + */ const removeBlock = async (cid) => { blocksCount++ try { const b32 = multibase.encode('base32', cid.multihash).toString() - if (markedSet.has(b32)) return null - const res = { cid } + + if (markedSet.has(b32)) { + return null + } try { await repo.blocks.delete(cid) removedBlocksCount++ } catch (err) { - res.err = new Error(`Could not delete block with CID ${cid}: ${err.message}`) + return { + err: new Error(`Could not delete block with CID ${cid}: ${err.message}`) + } } - return res + return { cid } } catch (err) { const msg = `Could delete 
block with CID ${cid}` log(msg, err) @@ -120,19 +147,3 @@ async function * deleteUnmarkedBlocks ({ repo }, markedSet, blockKeys) { log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blocksCount} blocks. ` + `Deleted ${removedBlocksCount} blocks.`) } - -/** - * @typedef {import('../../utils').AbortOptions} AbortOptions - * - * @typedef {Err|BlockID} Notification - * - * @typedef {Object} Err - * @property {void} [cid] - * @property {Error} err - * - * @typedef {Object} BlockID - * @property {CID} cid - * @property {void} [err] - * - * @typedef {import('interface-datastore').Key} Key - */ diff --git a/packages/ipfs-core/src/components/repo/index.js b/packages/ipfs-core/src/components/repo/index.js index 82e4e401d5..7854c39df7 100644 --- a/packages/ipfs-core/src/components/repo/index.js +++ b/packages/ipfs-core/src/components/repo/index.js @@ -7,10 +7,10 @@ const createVersion = require('./version') class RepoAPI { /** * @param {Object} config - * @param {GCLock} config.gcLock - * @param {Pin} config.pin - * @param {Repo} config.repo - * @param {Refs} config.refs + * @param {import('../gc-lock').GCLock} config.gcLock + * @param {import('ipfs-core-types/src/pin').API} config.pin + * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-core-types/src/refs').API["refs"]} config.refs */ constructor ({ gcLock, pin, repo, refs }) { this.gc = createGC({ gcLock, pin, refs, repo }) @@ -19,11 +19,3 @@ class RepoAPI { } } module.exports = RepoAPI - -/** - * @typedef {import('..').GCLock} GCLock - * @typedef {import('..').Pin} Pin - * @typedef {import('..').Repo} Repo - * @typedef {import('..').Refs} Refs - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/repo/stat.js b/packages/ipfs-core/src/components/repo/stat.js index 612ec451fc..293cc08c3e 100644 --- a/packages/ipfs-core/src/components/repo/stat.js +++ b/packages/ipfs-core/src/components/repo/stat.js @@ -4,20 +4,20 @@ const 
withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * @param {import('.').AbortOptions} [options] + * @type {import('ipfs-core-types/src/repo').API["stat"]} */ async function stat (options) { - const stats = await repo.stat(options) + const stats = await repo.stat() return { numObjects: stats.numObjects, repoSize: stats.repoSize, repoPath: stats.repoPath, - version: stats.version.toString(), + version: stats.version, storageMax: stats.storageMax } } diff --git a/packages/ipfs-core/src/components/repo/version.js b/packages/ipfs-core/src/components/repo/version.js index 25c6202595..8374c385ad 100644 --- a/packages/ipfs-core/src/components/repo/version.js +++ b/packages/ipfs-core/src/components/repo/version.js @@ -5,15 +5,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * If the repo has been initialized, report the current version. - * Otherwise report the version that would be initialized. 
- * - * @param {import('.').AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/repo').API["version"]} */ async function version (options) { try { @@ -35,7 +31,7 @@ module.exports = ({ repo }) => { throw err } - return repo.version.get(options) + return repo.version.get() } return withTimeoutOption(version) diff --git a/packages/ipfs-core/src/components/resolve.js b/packages/ipfs-core/src/components/resolve.js index 9c6b0b3ef7..267dbd6342 100644 --- a/packages/ipfs-core/src/components/resolve.js +++ b/packages/ipfs-core/src/components/resolve.js @@ -7,53 +7,14 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').IPLD} config.ipld - * @param {import('.').Name} config.name - An IPFS core interface name API + * @param {import('ipld')} config.ipld + * @param {import('ipfs-core-types/src/name').API} config.name - An IPFS core interface name API */ module.exports = ({ ipld, name }) => { /** - * Resolve the value of names to IPFS - * - * There are a number of mutable name protocols that can link among themselves - * and into IPNS. For example IPNS references can (currently) point at an IPFS - * object, and DNS links can point at other DNS links, IPNS entries, or IPFS - * objects. This command accepts any of these identifiers and resolves them - * to the referenced item. 
- * - * @param {string} path - The name to resolve - * @param {ResolveOptions} [opts] - * @returns {Promise} - A string representing the resolved name - * @example - * ```js - * // Resolve the value of your identity: - * const name = '/ipns/QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy' - * - * const res = await ipfs.resolve(name) - * console.log(res) - * // Logs: /ipfs/Qmcqtw8FfrVSBaRmbWwHxt3AuySBhJLcvmFYi3Lbc4xnwj - * - * // Resolve the value of another name recursively: - * const name = '/ipns/QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n' - * - * // Where: - * // /ipns/QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n - * // ...resolves to: - * // /ipns/QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy - * // ...which in turn resolves to: - * // /ipfs/Qmcqtw8FfrVSBaRmbWwHxt3AuySBhJLcvmFYi3Lbc4xnwj - * - * const res = await ipfs.resolve(name, { recursive: true }) - * console.log(res) - * // Logs: /ipfs/Qmcqtw8FfrVSBaRmbWwHxt3AuySBhJLcvmFYi3Lbc4xnwj - * - * // Resolve the value of an IPFS path: - * const name = '/ipfs/QmeZy1fGbwgVSrqbfh9fKQrAWgeyRnj7h8fsHS1oy3k99x/beep/boop' - * const res = await ipfs.resolve(name) - * console.log(res) - * // Logs: /ipfs/QmYRMjyvAiHKN9UTi8Bzt1HUspmSRD8T8DwxfSMzLgBon1 - * ``` + * @type {import('ipfs-core-types/src/root').API["resolve"]} */ - async function resolve (path, opts = {}) { + async function resolve (path, opts = { recursive: true, cidBase: 'base58btc' }) { if (!isIpfs.path(path)) { throw new Error('invalid argument ' + path) } @@ -94,13 +55,3 @@ module.exports = ({ ipld, name }) => { return withTimeoutOption(resolve) } - -/** - * @typedef {ResolveSettings & AbortOptions} ResolveOptions - * - * @typedef {Object} ResolveSettings - * @property {boolean} [recursive=true] - Resolve until result is an IPFS name. - * @property {import('cids').BaseNameOrCode} [cidBase='base58btc'] - Multibase codec name the CID in the resolved path will be encoded with. 
- * - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/root.js b/packages/ipfs-core/src/components/root.js index 9be83d8169..054a61e341 100644 --- a/packages/ipfs-core/src/components/root.js +++ b/packages/ipfs-core/src/components/root.js @@ -12,8 +12,6 @@ const createLsAPI = require('./ls') * @typedef {import('./cat').Context} CatContext * @typedef {import('./get').Context} GetContext * @typedef {import('./ls').Context} ListContext - * @typedef {import('ipfs-core-types/src').RootAPI} RootAPI - * @implements {RootAPI} */ class Root { /** diff --git a/packages/ipfs-core/src/components/start.js b/packages/ipfs-core/src/components/start.js index a9d512f377..b56b1c7d84 100644 --- a/packages/ipfs-core/src/components/start.js +++ b/packages/ipfs-core/src/components/start.js @@ -4,18 +4,21 @@ const Service = require('../utils/service') /** * @param {Object} config - * @param {import('.').NetworkService} config.network - * @param {import('.').PeerId} config.peerId - * @param {import('.').Repo} config.repo - * @param {import('.').BlockService} config.blockService - * @param {import('.').Print} config.print - * @param {import('.').Preload} config.preload - * @param {import('.').MFSPreload} config.mfsPreload - * @param {import('.').IPNS} config.ipns - * @param {import('.').Keychain} config.keychain - * @param {import('.').Options} config.options + * @param {import('../types').NetworkService} config.network + * @param {import('peer-id')} config.peerId + * @param {import('ipfs-repo')} config.repo + * @param {import('ipfs-block-service')} config.blockService + * @param {import('../types').Print} config.print + * @param {import('../types').Preload} config.preload + * @param {import('../types').MfsPreload} config.mfsPreload + * @param {import('./ipns')} config.ipns + * @param {import('libp2p/src/keychain')} config.keychain + * @param {import('../types').Options} config.options */ module.exports = ({ network, preload, peerId, 
keychain, repo, ipns, blockService, mfsPreload, print, options }) => { + /** + * @type {import('ipfs-core-types/src/root').API["start"]} + */ const start = async () => { const { bitswap, libp2p } = await Service.start(network, { peerId, diff --git a/packages/ipfs-core/src/components/stats/bw.js b/packages/ipfs-core/src/components/stats/bw.js index 1b6ecac384..60e85ec855 100644 --- a/packages/ipfs-core/src/components/stats/bw.js +++ b/packages/ipfs-core/src/components/stats/bw.js @@ -6,7 +6,26 @@ const errCode = require('err-code') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** - * @param {LibP2P} libp2p + * @typedef {Object} BWOptions + * @property {PeerId} [peer] - Specifies a peer to print bandwidth for + * @property {string} [proto] - Specifies a protocol to print bandwidth for + * @property {boolean} [poll] - Is used to yield bandwidth info at an interval + * @property {number|string} [interval=1000] - The time interval to wait between updating output, if `poll` is `true`. 
+ * + * @typedef {Object} BandwidthInfo + * @property {Big} totalIn + * @property {Big} totalOut + * @property {Big} rateIn + * @property {Big} rateOut + * + * @typedef {import('libp2p')} libp2p + * @typedef {import('peer-id')} PeerId + * @typedef {import('cids')} CID + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ + +/** + * @param {libp2p} libp2p * @param {BWOptions} opts * @returns {BandwidthInfo} */ @@ -44,14 +63,11 @@ function getBandwidthStats (libp2p, opts) { /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Get IPFS bandwidth information - * - * @param {BWOptions & AbortOptions} options - * @returns {AsyncIterable} + * @type {import('ipfs-core-types/src/stats').API["bw"]} */ const bw = async function * (options = {}) { const { libp2p } = await network.use(options) @@ -84,22 +100,3 @@ module.exports = ({ network }) => { return withTimeoutOption(bw) } - -/** - * @typedef {Object} BWOptions - * @property {PeerId} [peer] - Specifies a peer to print bandwidth for - * @property {string} [proto] - Specifies a protocol to print bandwidth for - * @property {boolean} [poll] - Is used to yield bandwidth info at an interval - * @property {number|string} [interval=1000] - The time interval to wait between updating output, if `poll` is `true`. 
- * - * @typedef {Object} BandwidthInfo - * @property {Big} totalIn - * @property {Big} totalOut - * @property {Big} rateIn - * @property {Big} rateOut - * - * @typedef {import('.').LibP2P} LibP2P - * @typedef {import('.').PeerId} PeerId - * @typedef {import('.').CID} CID - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/stats/index.js b/packages/ipfs-core/src/components/stats/index.js index 6760c21853..700653cbcb 100644 --- a/packages/ipfs-core/src/components/stats/index.js +++ b/packages/ipfs-core/src/components/stats/index.js @@ -7,8 +7,8 @@ const createBitswap = require('../bitswap/stat') class StatsAPI { /** * @param {Object} config - * @param {Repo} config.repo - * @param {NetworkService} config.network + * @param {import('ipfs-repo')} config.repo + * @param {import('../../types').NetworkService} config.network */ constructor ({ repo, network }) { this.repo = createRepo({ repo }) @@ -18,12 +18,3 @@ class StatsAPI { } module.exports = StatsAPI - -/** - * @typedef {import('..').Repo} Repo - * @typedef {import('..').PeerId} PeerId - * @typedef {import('..').LibP2P} LibP2P - * @typedef {import('..').CID} CID - * @typedef {import('..').NetworkService} NetworkService - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/stop.js b/packages/ipfs-core/src/components/stop.js index 5ff0412014..5cde54ef1d 100644 --- a/packages/ipfs-core/src/components/stop.js +++ b/packages/ipfs-core/src/components/stop.js @@ -4,14 +4,17 @@ const Service = require('../utils/service') /** * @param {Object} config - * @param {import('.').NetworkService} config.network - * @param {import('.').Preload} config.preload - * @param {import('.').BlockService} config.blockService - * @param {import('.').IPNS} config.ipns - * @param {import('.').Repo} config.repo - * @param {import('.').MFSPreload} config.mfsPreload + * @param {import('../types').NetworkService} config.network + * @param 
{import('../types').Preload} config.preload + * @param {import('ipfs-block-service')} config.blockService + * @param {import('./ipns')} config.ipns + * @param {import('ipfs-repo')} config.repo + * @param {import('../types').MfsPreload} config.mfsPreload */ module.exports = ({ network, preload, blockService, ipns, repo, mfsPreload }) => { + /** + * @type {import('ipfs-core-types/src/root').API["stop"]} + */ const stop = async () => { blockService.unsetExchange() await Promise.all([ diff --git a/packages/ipfs-core/src/components/storage.js b/packages/ipfs-core/src/components/storage.js index d44a66bf8d..c8b50b5af1 100644 --- a/packages/ipfs-core/src/components/storage.js +++ b/packages/ipfs-core/src/components/storage.js @@ -12,12 +12,22 @@ const configService = require('./config') const { NotEnabledError } = require('../errors') const createLibP2P = require('./libp2p') +/** + * @typedef {import('ipfs-repo')} IPFSRepo + * @typedef {import('../types').Options} IPFSOptions + * @typedef {import('../types').InitOptions} InitOptions + * @typedef {import('../types').Print} Print + * @typedef {import('ipfs-core-types/src/config').Config} IPFSConfig + * @typedef {import('libp2p-crypto').KeyType} KeyType + * @typedef {import('libp2p/src/keychain')} Keychain + */ + class Storage { /** * @private * @param {PeerId} peerId * @param {Keychain} keychain - * @param {Repo} repo + * @param {IPFSRepo} repo * @param {Print} print * @param {boolean} isNew */ @@ -31,17 +41,17 @@ class Storage { } /** - * - * @param {Options} options + * @param {Print} print + * @param {IPFSOptions} options */ - static async start (options) { - const { repoAutoMigrate: autoMigrate, repo: inputRepo, print, silent } = options + static async start (print, options) { + const { repoAutoMigrate, repo: inputRepo } = options const repo = (typeof inputRepo === 'string' || inputRepo == null) - ? createRepo({ path: inputRepo, autoMigrate, silent }) + ? 
createRepo(print, { path: inputRepo, autoMigrate: Boolean(repoAutoMigrate) }) : inputRepo - const { peerId, keychain, isNew } = await loadRepo(repo, options) + const { peerId, keychain, isNew } = await loadRepo(print, repo, options) // TODO: throw error? // @ts-ignore On start, keychain will always be available @@ -51,58 +61,42 @@ class Storage { module.exports = Storage /** - * - * @param {Repo} repo - * @param {RepoOptions & InitOptions} options - * @returns {Promise<{peerId: PeerId, keychain?: Keychain, isNew:boolean }>} + * @param {Print} print + * @param {IPFSRepo} repo + * @param {IPFSOptions} options */ -const loadRepo = async (repo, options) => { - const openError = await openRepo(repo) - if (openError == null) { - // If opened successfully configure repo +const loadRepo = async (print, repo, options) => { + if (!repo.closed) { return { ...await configureRepo(repo, options), isNew: false } - } else if (openError.code === ERR_REPO_NOT_INITIALIZED) { - if (options.allowNew === false) { - throw new NotEnabledError('Initialization of new repos disabled by config, pass `config.init.isNew: true` to enable it') - } else { - // If failed to open, because repo isn't initilaized and initalizing a - // new repo allowed, init repo: - return { ...await initRepo(repo, options), isNew: true } - } - } else { - throw openError } -} -/** - * Attempts to open given repo unless it is already open and returns result - * containing repo or an error if failed. - * - * @param {Repo} repo - * @returns {Promise<(Error & { code: number }) | null>} - */ -const openRepo = async (repo) => { - // If repo is closed attempt to open it. 
- if (repo.closed) { - try { - await repo.open() - return null - } catch (error) { - return error + try { + await repo.open() + + return { ...await configureRepo(repo, options), isNew: false } + } catch (err) { + if (err.code !== ERR_REPO_NOT_INITIALIZED) { + throw err } - } else { - return null + + if (options.init && options.init.allowNew === false) { + throw new NotEnabledError('Initialization of new repos disabled by config, pass `config.init.isNew: true` to enable it') + } + + return { ...await initRepo(print, repo, options), isNew: true } } } /** - * @param {Repo} repo - * @param {RepoOptions & InitOptions} options + * @param {Print} print + * @param {IPFSRepo} repo + * @param {IPFSOptions} options * @returns {Promise<{peerId: PeerId, keychain?: Keychain}>} */ -const initRepo = async (repo, options) => { - // 1. Verify that repo does not exist yet (if it does and we could not - // open it we give up) +const initRepo = async (print, repo, options) => { + const initOptions = options.init || {} + + // 1. Verify that repo does not exist yet (if it does and we could not open it we give up) const exists = await repo.exists() log('repo exists?', exists) @@ -110,20 +104,18 @@ const initRepo = async (repo, options) => { throw new Error('repo already exists') } - // 2. Restore `peerId` from a given `.privateKey` or init new using - // provide options. - const peerId = options.privateKey - ? await decodePeerId(options.privateKey) - : await initPeerId(options) + // 2. Restore `peerId` from a given `.privateKey` or init new using provided options. + const peerId = initOptions.privateKey + ? await decodePeerId(initOptions.privateKey) + : await initPeerId(print, initOptions) const identity = peerIdToIdentity(peerId) log('peer identity: %s', identity.PeerID) - // 3. Init new repo with provided `.config` and restored / initalized - // peerd identity. + // 3. 
Init new repo with provided `.config` and restored / initialized `peerId` const config = { - ...mergeOptions(applyProfiles(getDefaultConfig(), options.profiles), options.config), + ...mergeOptions(applyProfiles(getDefaultConfig(), initOptions.profiles), options.config), Identity: identity } await repo.init(config) @@ -171,15 +163,15 @@ const decodePeerId = (peerId) => { } /** - * Initializes new PeerId by generting an underlying keypair. + * Initializes new PeerId by generating an underlying keypair. * + * @param {Print} print * @param {Object} options * @param {KeyType} [options.algorithm='RSA'] * @param {number} [options.bits=2048] - * @param {Print} options.print * @returns {Promise} */ -const initPeerId = ({ print, algorithm = 'RSA', bits = 2048 }) => { +const initPeerId = (print, { algorithm = 'RSA', bits = 2048 }) => { // Generate peer identity keypair + transform to desired format + add to config. print('generating %s-bit (rsa only) %s keypair...', bits, algorithm) return PeerId.create({ keyType: algorithm, bits }) @@ -195,14 +187,18 @@ const peerIdToIdentity = (peerId) => ({ }) /** - * Applies passed `profiles` and a `config` to an open repo. + * Applies passed `profiles` and a `config` to an open repo. 
* - * @param {Repo} repo - * @param {ConfigureOptions} options + * @param {IPFSRepo} repo + * @param {IPFSOptions} options * @returns {Promise<{peerId: PeerId, keychain?: Keychain}>} */ -const configureRepo = async (repo, { config, profiles, pass }) => { +const configureRepo = async (repo, options) => { + const config = options.config + const profiles = (options.init && options.init.profiles) || [] + const pass = options.pass const original = await repo.config.getAll() + // @ts-ignore TODO: move config types to repo const changed = mergeConfigs(applyProfiles(original, profiles), config) if (original !== changed) { @@ -253,51 +249,3 @@ const applyProfiles = (config, profiles) => { return profile.transform(config) }, config) } - -/** - * @typedef {StorageOptions & RepoOptions & InitOptions} Options - * - * @typedef {Object} StorageOptions - * @property {Repo|string} [repo='~/.jsipfs'] - The file path at which to store the - * IPFS node’s data. Alternatively, you can set up a customized storage system - * by providing an Repo implementation. (In browser default is 'ipfs'). - * @property {boolean} [repoAutoMigrate=true] - js-ipfs comes bundled with a tool - * that automatically migrates your IPFS repository when a new version is - * available. - * @property {boolean} [repoOwner] - * @property {IPLDOptions} [ipld] - * - * - * @typedef {Object} RepoOptions - * @property {Print} print - * @property {IPFSConfig} [config] - * @property {boolean} [silent] - * - * @typedef {Object} ConfigureOptions - * @property {IPFSConfig} [options.config] - * @property {string[]} [options.profiles] - * @property {string} [options.pass] - * - * @typedef {Object} InitOptions - On Frist run js-ipfs will initalize a repo - * which can be customized through this settings. - * @property {boolean} [emptyRepo=false] - Whether to remove built-in assets, - * like the instructional tour and empty mutable file system, from the repo. - * @property {KeyType} [algorithm='RSA'] - The type of key to use. 
- * @property {number} [bits=2048] - Number of bits to use in the generated key - * pair (rsa only). - * @property {PeerId|string} [privateKey] - A pre-generated private key to use. - * **NOTE: This overrides `bits`.** - * @property {string} [pass] - A passphrase to encrypt keys. You should - * generally use the top-level `pass` option instead of the `init.pass` - * option (this one will take its value from the top-level option if not set). - * @property {string[]} [profiles] - Apply profile settings to config. - * @property {boolean} [allowNew=true] - Set to `false` to disallow - * initialization if the repo does not already exist. - * - * @typedef {import('.').IPLDOptions} IPLDOptions - * @typedef {import('.').Print} Print - * @typedef {import('.').IPFSConfig} IPFSConfig - * @typedef {import('ipfs-core-types/src/repo').Repo} Repo - * @typedef {import('libp2p-crypto').KeyType} KeyType - * @typedef {import('libp2p/src/keychain')} Keychain - */ diff --git a/packages/ipfs-core/src/components/swarm/addrs.js b/packages/ipfs-core/src/components/swarm/addrs.js index 28ae132fd3..e8c04f6ff9 100644 --- a/packages/ipfs-core/src/components/swarm/addrs.js +++ b/packages/ipfs-core/src/components/swarm/addrs.js @@ -4,14 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * List of known addresses of each peer connected. 
- * - * @param {import('../../utils').AbortOptions} options - * @returns {Promise} + * @type {import('ipfs-core-types/src/swarm').API["addrs"]} */ async function addrs (options) { // eslint-disable-line require-await const peers = [] @@ -19,6 +16,7 @@ module.exports = ({ network }) => { for (const [peerId, peer] of libp2p.peerStore.peers.entries()) { peers.push({ id: peerId, + // @ts-ignore - libp2p types are missing addrs: peer.addresses.map((mi) => mi.multiaddr) }) } @@ -27,11 +25,3 @@ module.exports = ({ network }) => { return withTimeoutOption(addrs) } - -/** - * @typedef {Object} PeerInfo - * @property {string} id - * @property {Multiaddr[]} addrs - * - * @typedef {import('.').Multiaddr} Multiaddr - */ diff --git a/packages/ipfs-core/src/components/swarm/connect.js b/packages/ipfs-core/src/components/swarm/connect.js index 49c078a3b8..c49c4b2028 100644 --- a/packages/ipfs-core/src/components/swarm/connect.js +++ b/packages/ipfs-core/src/components/swarm/connect.js @@ -4,15 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Open a connection to a given address. 
- * - * @param {import('.').Multiaddr} addr - * @param {import('.').AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/swarm').API["connect"]} */ async function connect (addr, options) { const { libp2p } = await network.use(options) diff --git a/packages/ipfs-core/src/components/swarm/disconnect.js b/packages/ipfs-core/src/components/swarm/disconnect.js index 6e3c8ad0f7..40d8e80ed4 100644 --- a/packages/ipfs-core/src/components/swarm/disconnect.js +++ b/packages/ipfs-core/src/components/swarm/disconnect.js @@ -4,15 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Close a connection on a given address. - * - * @param {import('.').Multiaddr} addr - * @param {import('.').AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/swarm').API["disconnect"]} */ async function disconnect (addr, options) { const { libp2p } = await network.use(options) diff --git a/packages/ipfs-core/src/components/swarm/index.js b/packages/ipfs-core/src/components/swarm/index.js index 29af23f83b..c7986d9185 100644 --- a/packages/ipfs-core/src/components/swarm/index.js +++ b/packages/ipfs-core/src/components/swarm/index.js @@ -9,7 +9,7 @@ const createPeersAPI = require('./peers') class SwarmAPI { /** * @param {Object} config - * @param {NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ constructor ({ network }) { this.addrs = createAddrsAPI({ network }) @@ -21,9 +21,3 @@ class SwarmAPI { } module.exports = SwarmAPI - -/** - * @typedef {import('..').NetworkService} NetworkService - * @typedef {import('..').Multiaddr} Multiaddr - * @typedef {import('..').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/swarm/local-addrs.js 
b/packages/ipfs-core/src/components/swarm/local-addrs.js index e7321aa175..f5356fd099 100644 --- a/packages/ipfs-core/src/components/swarm/local-addrs.js +++ b/packages/ipfs-core/src/components/swarm/local-addrs.js @@ -4,14 +4,11 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Local addresses this node is listening on. - * - * @param {import('.').AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/swarm').API["localAddrs"]} */ async function localAddrs (options) { const { libp2p } = await network.use(options) @@ -20,7 +17,3 @@ module.exports = ({ network }) => { return withTimeoutOption(localAddrs) } - -/** - * @typedef {import('.').Multiaddr} Multiaddr - */ diff --git a/packages/ipfs-core/src/components/swarm/peers.js b/packages/ipfs-core/src/components/swarm/peers.js index 0c91011217..dfae29201e 100644 --- a/packages/ipfs-core/src/components/swarm/peers.js +++ b/packages/ipfs-core/src/components/swarm/peers.js @@ -4,37 +4,35 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param {import('.').NetworkService} config.network + * @param {import('../../types').NetworkService} config.network */ module.exports = ({ network }) => { /** - * Local addresses this node is listening on. 
- * - * @param {PeersOptions & AbortOptions} [options] - * @returns {Promise} + * @type {import('ipfs-core-types/src/swarm').API["peers"]} */ async function peers (options = {}) { const { libp2p } = await network.use(options) - const verbose = options.v || options.verbose const peers = [] for (const [peerId, connections] of libp2p.connections) { for (const connection of connections) { - const tupple = { + /** @type {import('ipfs-core-types/src/swarm').PeersResult} */ + const peer = { addr: connection.remoteAddr, peer: peerId } - if (verbose || options.direction) { - tupple.direction = connection.stat.direction + if (options.verbose || options.direction) { + peer.direction = connection.stat.direction } - if (verbose) { - tupple.muxer = connection.stat.multiplexer - tupple.latency = 'n/a' + if (options.verbose) { + peer.muxer = connection.stat.multiplexer + peer.latency = 'n/a' + peer.streams = [] // TODO: get this from libp2p } - peers.push(tupple) + peers.push(peer) } } @@ -43,22 +41,3 @@ module.exports = ({ network }) => { return withTimeoutOption(peers) } - -/** - * @typedef {Object} PeerConnection - * @property {Multiaddr} addr - * @property {string} peer - * @property {string} [latency] - * @property {string} [muxer] - * @property {number} [direction] - * - * @typedef {Object} PeersOptions - * @property {boolean} [direction=false] - * @property {boolean} [streams=false] - * @property {boolean} [verbose=false] - * @property {boolean} [v=false] - * @property {boolean} [latency=false] - * - * @typedef {import('.').Multiaddr} Multiaddr - * @typedef {import('.').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/components/version.js b/packages/ipfs-core/src/components/version.js index e4c0e3c010..b7d7a46a2e 100644 --- a/packages/ipfs-core/src/components/version.js +++ b/packages/ipfs-core/src/components/version.js @@ -5,26 +5,18 @@ const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** * @param {Object} config - * @param 
{import('.').Repo} config.repo + * @param {import('ipfs-repo')} config.repo */ module.exports = ({ repo }) => { /** - * Returns the implementation version - * - * @param {import('.').AbortOptions} [options] - * @returns {Promise} - * @example - * ```js - * const version = await ipfs.version() - * console.log(version) - * ``` + * @type {import('ipfs-core-types/src/root').API["version"]} */ async function version (options) { - const repoVersion = await repo.version.get(options) + const repoVersion = await repo.version.get() return { version: pkg.version, - repo: repoVersion, + repo: `${repoVersion}`, // @ts-ignore gitHead is defined in published versions commit: pkg.gitHead || '', @@ -34,17 +26,3 @@ module.exports = ({ repo }) => { return withTimeoutOption(version) } - -/** - * @typedef {object} Version - * An object with the version information for the implementation, - * the commit and the Repo. `js-ipfs` instances will also return - * the version of `interface-ipfs-core` and `ipfs-http-client` - * supported by this node - * - * @property {string} version - * @property {number} repo - * @property {string} [commit] - * @property {string} [interface-ipfs-core] - * @property {string} [ipfs-http-client] - */ diff --git a/packages/ipfs-core/src/ipns/index.js b/packages/ipfs-core/src/ipns/index.js index 87fd6b4970..9729cede58 100644 --- a/packages/ipfs-core/src/ipns/index.js +++ b/packages/ipfs-core/src/ipns/index.js @@ -10,11 +10,25 @@ const log = Object.assign(debug('ipfs:ipns'), { const IpnsPublisher = require('./publisher') const IpnsRepublisher = require('./republisher') const IpnsResolver = require('./resolver') -const { normalizePath } = require('../utils') const TLRU = require('../utils/tlru') const defaultRecordTtl = 60 * 1000 +/** + * @typedef {import('libp2p-crypto').PrivateKey} PrivateKey + * @typedef {import('peer-id')} PeerId + */ + class IPNS { + /** + * @param {import('ipfs-core-types/src/basic').BufferStore} routing + * @param 
{import('interface-datastore').Datastore} datastore + * @param {PeerId} peerId + * @param {import('libp2p/src/keychain')} keychain + * @param {object} options + * @param {string} options.pass + * @param {number} [options.initialBroadcastInterval] + * @param {number} [options.broadcastInterval] + */ constructor (routing, datastore, peerId, keychain, options) { this.publisher = new IpnsPublisher(routing, datastore) this.republisher = new IpnsRepublisher(this.publisher, datastore, peerId, keychain, options) @@ -23,11 +37,15 @@ class IPNS { this.routing = routing } - // Publish + /** + * Publish + * + * @param {PrivateKey} privKey + * @param {Uint8Array} value + * @param {number} lifetime + */ async publish (privKey, value, lifetime = IpnsPublisher.defaultRecordLifetime) { try { - value = normalizePath(value) - const peerId = await createFromPrivKey(privKey.bytes) await this.publisher.publishWithEOL(privKey, value, lifetime) @@ -54,7 +72,14 @@ class IPNS { } } - // Resolve + /** + * Resolve + * + * @param {string} name + * @param {object} options + * @param {boolean} [options.nocache] + * @param {boolean} [options.recursive] + */ async resolve (name, options = {}) { if (typeof name !== 'string') { throw errcode(new Error('name received is not valid'), 'ERR_INVALID_NAME') @@ -84,8 +109,14 @@ class IPNS { } } - // Initialize keyspace - // sets the ipns record for the given key to point to an empty directory + /** + * Initialize keyspace + * + * Sets the ipns record for the given key to point to an empty directory + * + * @param {PrivateKey} privKey + * @param {Uint8Array} value + */ async initializeKeyspace (privKey, value) { // eslint-disable-line require-await return this.publish(privKey, value, IpnsPublisher.defaultRecordLifetime) } diff --git a/packages/ipfs-core/src/ipns/publisher.js b/packages/ipfs-core/src/ipns/publisher.js index 3b66854dc0..7b68f5e923 100644 --- a/packages/ipfs-core/src/ipns/publisher.js +++ b/packages/ipfs-core/src/ipns/publisher.js @@ -8,20 
+8,37 @@ const log = Object.assign(debug('ipfs:ipns:publisher'), { error: debug('ipfs:ipns:publisher:error') }) const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayEquals = require('uint8arrays/equals') const ipns = require('ipns') +/** + * @typedef {import('libp2p-crypto').PrivateKey} PrivateKey + * @typedef {import('libp2p-crypto').PublicKey} PublicKey + * @typedef {import('ipns').IPNSEntry} IPNSEntry + */ + const ERR_NOT_FOUND = Errors.notFoundError().code const defaultRecordLifetime = 60 * 60 * 1000 // IpnsPublisher is capable of publishing and resolving names to the IPFS routing system. class IpnsPublisher { + /** + * @param {import('ipfs-core-types/src/basic').BufferStore} routing + * @param {import('interface-datastore').Datastore} datastore + */ constructor (routing, datastore) { this._routing = routing this._datastore = datastore } - // publish record with a eol + /** + * Publish record with a eol + * + * @param {PrivateKey} privKey + * @param {Uint8Array} value + * @param {number} lifetime + */ async publishWithEOL (privKey, value, lifetime) { if (!privKey || !privKey.bytes) { throw errcode(new Error('invalid private key'), 'ERR_INVALID_PRIVATE_KEY') @@ -33,11 +50,20 @@ class IpnsPublisher { return this._putRecordToRouting(record, peerId) } - // Accepts a keypair, as well as a value (ipfsPath), and publishes it out to the routing system + /** + * Accepts a keypair, as well as a value (ipfsPath), and publishes it out to the routing system + * + * @param {PrivateKey} privKey + * @param {Uint8Array} value + */ publish (privKey, value) { return this.publishWithEOL(privKey, value, defaultRecordLifetime) } + /** + * @param {IPNSEntry} record + * @param {PeerId} peerId + */ async _putRecordToRouting (record, peerId) { if (!(PeerId.isPeerId(peerId))) { const errMsg = 'peerId received is not valid' @@ -61,6 +87,10 @@ class IpnsPublisher { return embedPublicKeyRecord || record } + /** + * @param {Key} key + * @param {IPNSEntry} entry + */ 
async _publishEntry (key, entry) { if (!(Key.isKey(key))) { const errMsg = 'datastore key does not have a valid format' @@ -95,6 +125,10 @@ class IpnsPublisher { } } + /** + * @param {Key} key + * @param {PublicKey} publicKey + */ async _publishPublicKey (key, publicKey) { if ((!Key.isKey(key))) { const errMsg = 'datastore key does not have a valid format' @@ -125,8 +159,15 @@ class IpnsPublisher { } } - // Returns the record this node has published corresponding to the given peer ID. - // If `checkRouting` is true and we have no existing record, this method will check the routing system for any existing records. + /** + * Returns the record this node has published corresponding to the given peer ID. + * + * If `checkRouting` is true and we have no existing record, this method will check the routing system for any existing records. + * + * @param {PeerId} peerId + * @param {object} options + * @param {boolean} [options.checkRouting] + */ async _getPublished (peerId, options = {}) { if (!(PeerId.isPeerId(peerId))) { const errMsg = 'peerId received is not valid' @@ -152,7 +193,7 @@ class IpnsPublisher { } if (!checkRouting) { - throw errcode(err) + throw errcode(err, 'ERR_NOT_FOUND_AND_CHECK_ROUTING_NOT_ENABLED') } // Try to get from routing @@ -170,6 +211,9 @@ class IpnsPublisher { } } + /** + * @param {Uint8Array} data + */ _unmarshalData (data) { try { return ipns.unmarshal(data) @@ -178,7 +222,13 @@ class IpnsPublisher { } } - async _updateOrCreateRecord (privKey, value, validity, peerId) { + /** + * @param {PrivateKey} privKey + * @param {Uint8Array} value + * @param {number} lifetime + * @param {PeerId} peerId + */ + async _updateOrCreateRecord (privKey, value, lifetime, peerId) { if (!(PeerId.isPeerId(peerId))) { const errMsg = 'peerId received is not valid' log.error(errMsg) @@ -207,14 +257,14 @@ class IpnsPublisher { let seqNumber = 0 if (record && record.sequence !== undefined) { - seqNumber = record.value.toString() !== value ? 
record.sequence + 1 : record.sequence + seqNumber = !uint8ArrayEquals(record.value, value) ? record.sequence + 1 : record.sequence } let entryData try { // Create record - entryData = await ipns.create(privKey, value, seqNumber, validity) + entryData = await ipns.create(privKey, value, seqNumber, lifetime) } catch (err) { const errMsg = `ipns record for ${value} could not be created` diff --git a/packages/ipfs-core/src/ipns/republisher.js b/packages/ipfs-core/src/ipns/republisher.js index 0efccdc954..e419ab8884 100644 --- a/packages/ipfs-core/src/ipns/republisher.js +++ b/packages/ipfs-core/src/ipns/republisher.js @@ -10,6 +10,10 @@ const log = Object.assign(debug('ipfs:ipns:republisher'), { error: debug('ipfs:ipns:republisher:error') }) +/** + * @typedef {import('libp2p-crypto').PrivateKey} PrivateKey + */ + const minute = 60 * 1000 const hour = 60 * minute @@ -17,7 +21,17 @@ const defaultBroadcastInterval = 4 * hour const defaultRecordLifetime = 24 * hour class IpnsRepublisher { - constructor (publisher, datastore, peerId, keychain, options = {}) { + /** + * @param {import('./publisher')} publisher + * @param {import('interface-datastore').Datastore} datastore + * @param {PeerId} peerId + * @param {import('libp2p/src/keychain')} keychain + * @param {object} options + * @param {string} options.pass + * @param {number} [options.initialBroadcastInterval] + * @param {number} [options.broadcastInterval] + */ + constructor (publisher, datastore, peerId, keychain, options = { pass: '' }) { this._publisher = publisher this._datastore = datastore this._peerId = peerId @@ -102,6 +116,10 @@ class IpnsRepublisher { await republishHandle.cancel() } + /** + * @param {PrivateKey} privateKey + * @param {string} pass + */ async _republishEntries (privateKey, pass) { // TODO: Should use list of published entries. // We can't currently *do* that because go uses this method for now. 
@@ -131,6 +149,9 @@ class IpnsRepublisher { } } + /** + * @param {PrivateKey} privateKey + */ async _republishEntry (privateKey) { if (!privateKey || !privateKey.bytes) { throw errcode(new Error('invalid private key'), 'ERR_INVALID_PRIVATE_KEY') @@ -149,6 +170,9 @@ class IpnsRepublisher { } } + /** + * @param {PeerId} peerId + */ async _getPreviousValue (peerId) { if (!(PeerId.isPeerId(peerId))) { throw errcode(new Error('invalid peer ID'), 'ERR_INVALID_PEER_ID') diff --git a/packages/ipfs-core/src/ipns/resolver.js b/packages/ipfs-core/src/ipns/resolver.js index f57936064d..efdbad6586 100644 --- a/packages/ipfs-core/src/ipns/resolver.js +++ b/packages/ipfs-core/src/ipns/resolver.js @@ -15,10 +15,18 @@ const ERR_NOT_FOUND = Errors.notFoundError().code const defaultMaximumRecursiveDepth = 32 class IpnsResolver { + /** + * @param {import('ipfs-core-types/src/basic').BufferStore} routing + */ constructor (routing) { this._routing = routing } + /** + * @param {string} name + * @param {object} options + * @param {boolean} [options.recursive] + */ async resolve (name, options = {}) { if (typeof name !== 'string') { throw errcode(new Error('invalid name'), 'ERR_INVALID_NAME') @@ -35,7 +43,7 @@ class IpnsResolver { const key = nameSegments[2] // Define a maximum depth if recursive option enabled - let depth + let depth = Infinity if (recursive) { depth = defaultMaximumRecursiveDepth @@ -47,7 +55,13 @@ class IpnsResolver { return res } - // Recursive resolver according to the specified depth + /** + * Recursive resolver according to the specified depth + * + * @param {string} name + * @param {number} depth + * @returns {Promise} + */ async resolver (name, depth) { // Exceeded recursive maximum depth if (depth === 0) { @@ -69,7 +83,11 @@ class IpnsResolver { return this.resolver(nameSegments[2], depth - 1) } - // resolve ipns entries from the provided routing + /** + * Resolve ipns entries from the provided routing + * + * @param {string} name + */ async _resolveName (name) { 
const peerId = PeerId.createFromCID(name) const { routingKey } = ipns.getIdKeys(peerId.toBytes()) @@ -101,7 +119,12 @@ class IpnsResolver { return this._validateRecord(peerId, ipnsEntry) } - // validate a resolved record + /** + * Validate a resolved record + * + * @param {PeerId} peerId + * @param {import('ipns').IPNSEntry} ipnsEntry + */ async _validateRecord (peerId, ipnsEntry) { const pubKey = await ipns.extractPublicKey(peerId, ipnsEntry) diff --git a/packages/ipfs-core/src/ipns/routing/config.js b/packages/ipfs-core/src/ipns/routing/config.js index 941097e100..033b928d8a 100644 --- a/packages/ipfs-core/src/ipns/routing/config.js +++ b/packages/ipfs-core/src/ipns/routing/config.js @@ -6,6 +6,13 @@ const get = require('dlv') const PubsubDatastore = require('./pubsub-datastore') const OfflineDatastore = require('./offline-datastore') +/** + * @param {object} arg + * @param {import('libp2p')} arg.libp2p + * @param {import('ipfs-repo')} arg.repo + * @param {import('peer-id')} arg.peerId + * @param {object} arg.options + */ module.exports = ({ libp2p, repo, peerId, options }) => { // Setup online routing for IPNS with a tiered routing composed by a DHT and a Pubsub router (if properly enabled) const ipnsStores = [] diff --git a/packages/ipfs-core/src/ipns/routing/offline-datastore.js b/packages/ipfs-core/src/ipns/routing/offline-datastore.js index aef4bbe0b0..c6f3b4c80e 100644 --- a/packages/ipfs-core/src/ipns/routing/offline-datastore.js +++ b/packages/ipfs-core/src/ipns/routing/offline-datastore.js @@ -13,8 +13,12 @@ const log = Object.assign(debug('ipfs:ipns:offline-datastore'), { // Offline datastore aims to mimic the same encoding as routing when storing records // to the local datastore class OfflineDatastore { + /** + * @param {import('ipfs-repo')} repo + */ constructor (repo) { this._repo = repo + this.stores = [] } /** @@ -22,7 +26,6 @@ class OfflineDatastore { * * @param {Uint8Array} key - identifier of the value. 
* @param {Uint8Array} value - value to be stored. - * @returns {Promise} */ async put (key, value) { // eslint-disable-line require-await if (!(key instanceof Uint8Array)) { @@ -52,7 +55,6 @@ class OfflineDatastore { * Get a value from the local datastore indexed by the received key properly encoded. * * @param {Uint8Array} key - identifier of the value to be obtained. - * @returns {Promise} */ async get (key) { if (!(key instanceof Uint8Array)) { @@ -82,7 +84,11 @@ class OfflineDatastore { return record.value } - // encode key properly - base32(/ipns/{cid}) + /** + * encode key properly - base32(/ipns/{cid}) + * + * @param {Uint8Array} key + */ _routingKey (key) { return new Key('/' + encodeBase32(key), false) } diff --git a/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js b/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js index ae0f02bbe4..925725ee59 100644 --- a/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js +++ b/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js @@ -6,8 +6,6 @@ const PubsubDatastore = require('datastore-pubsub') const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') -const withIs = require('class-is') - const errcode = require('err-code') const debug = require('debug') const log = Object.assign(debug('ipfs:ipns:pubsub'), { @@ -16,8 +14,13 @@ const log = Object.assign(debug('ipfs:ipns:pubsub'), { // Pubsub datastore aims to manage the pubsub subscriptions for IPNS class IpnsPubsubDatastore { + /** + * @param {import('libp2p-interfaces/src/pubsub')} pubsub + * @param {import('interface-datastore').Datastore} localDatastore + * @param {import('peer-id')} peerId + */ constructor (pubsub, localDatastore, peerId) { - this._pubsub = pubsub + /** @type {Record} */ this._subscriptions = {} // Bind _handleSubscriptionKey function, which is called by PubsubDatastore. 
@@ -28,11 +31,11 @@ class IpnsPubsubDatastore { /** * Put a value to the pubsub datastore indexed by the received key properly encoded. * - * @param {Buffer} key - identifier of the value. - * @param {Buffer} value - value to be stored. - * @returns {Promise} + * @param {Uint8Array} key - identifier of the value. + * @param {Uint8Array} value - value to be stored. */ - async put (key, value) { // eslint-disable-line require-await + put (key, value) { + // @ts-ignore datastores take Key keys, this one takes Uint8Array keys return this._pubsubDs.put(key, value) } @@ -41,14 +44,14 @@ class IpnsPubsubDatastore { * Also, the identifier topic is subscribed to and the pubsub datastore records will be * updated once new publishes occur. * - * @param {Buffer} key - identifier of the value to be obtained. - * @returns {Promise} + * @param {Uint8Array} key - identifier of the value to be obtained. */ async get (key) { let res let err try { + // @ts-ignore datastores take Key keys, this one takes Uint8Array keys res = await this._pubsubDs.get(key) } catch (e) { err = e @@ -74,7 +77,11 @@ class IpnsPubsubDatastore { return res } - // Modify subscription key to have a proper encoding + /** + * Modify subscription key to have a proper encoding + * + * @param {Uint8Array | string} key + */ _handleSubscriptionKey (key) { if (key instanceof Uint8Array) { key = uint8ArrayToString(key, 'base58btc') @@ -99,8 +106,6 @@ class IpnsPubsubDatastore { /** * Get pubsub subscriptions related to ipns. - * - * @returns {string[]} */ getSubscriptions () { const subscriptions = Object.values(this._subscriptions).filter(Boolean) @@ -112,7 +117,6 @@ class IpnsPubsubDatastore { * Cancel pubsub subscriptions related to ipns. * * @param {string} name - ipns path to cancel the pubsub subscription. 
- * @returns {Promise<{canceled: boolean}>} */ async cancel (name) { // eslint-disable-line require-await if (typeof name !== 'string') { @@ -138,7 +142,7 @@ class IpnsPubsubDatastore { this._pubsubDs.unsubscribe(bufTopic) - this._subscriptions[stringifiedTopic] = undefined + delete this._subscriptions[stringifiedTopic] log(`unsubscribed pubsub ${stringifiedTopic}: ${name}`) return { @@ -147,4 +151,4 @@ class IpnsPubsubDatastore { } } -exports = module.exports = withIs(IpnsPubsubDatastore, { className: 'IpnsPubsubDatastore', symbolName: '@js-ipfs/ipns/IpnsPubsubDatastore' }) +module.exports = IpnsPubsubDatastore diff --git a/packages/ipfs-core/src/ipns/routing/utils.js b/packages/ipfs-core/src/ipns/routing/utils.js index 20f2b65423..e3af891327 100644 --- a/packages/ipfs-core/src/ipns/routing/utils.js +++ b/packages/ipfs-core/src/ipns/routing/utils.js @@ -4,9 +4,20 @@ const ipns = require('ipns') const uint8ArrayToString = require('uint8arrays/to-string') module.exports = { + /** + * @param {Uint8Array} buf + */ encodeBase32: (buf) => uint8ArrayToString(buf, 'base32upper'), validator: { - func: (key, record, cb) => ipns.validator.validate(record, key, cb) + /** + * @param {Uint8Array} key + * @param {Uint8Array} record + */ + func: (key, record) => ipns.validator.validate(record, key) }, + /** + * @param {*} _k + * @param {Uint8Array[]} records + */ selector: (_k, records) => ipns.validator.select(records[0], records[1]) } diff --git a/packages/ipfs-core/src/mfs-preload.js b/packages/ipfs-core/src/mfs-preload.js index e800eff9f4..2f46d3c713 100644 --- a/packages/ipfs-core/src/mfs-preload.js +++ b/packages/ipfs-core/src/mfs-preload.js @@ -6,10 +6,17 @@ const log = Object.assign(debug('ipfs:mfs-preload'), { error: debug('ipfs:mfs-preload:error') }) +/** + * @typedef {PreloadOptions & MFSPreloadOptions} Options + * @typedef {Object} MFSPreloadOptions + * @property {number} [interval] + * @typedef {import('./types').PreloadOptions} PreloadOptions + */ + /** * @param 
{Object} config - * @param {import('./components').Preload} config.preload - * @param {import('./components').Files} config.files + * @param {import('./types').Preload} config.preload + * @param {import('ipfs-core-types/src/files').API} config.files * @param {Options} [config.options] */ module.exports = ({ preload, files, options = {} }) => { @@ -21,7 +28,8 @@ module.exports = ({ preload, files, options = {} }) => { return { start: noop, stop: noop } } - let rootCid, timeoutId + let rootCid = '' + let timeoutId = 0 const preloadMfs = async () => { try { @@ -58,11 +66,3 @@ module.exports = ({ preload, files, options = {} }) => { } } } - -/** - * @typedef {ReturnType} MFSPreload - * @typedef {PreloadOptions & MFSPreloadOptions} Options - * @typedef {Object} MFSPreloadOptions - * @property {number} [interval] - * @typedef {import('./components').PreloadOptions} PreloadOptions - */ diff --git a/packages/ipfs-core/src/preload.js b/packages/ipfs-core/src/preload.js index d6857ca8ed..45cf645511 100644 --- a/packages/ipfs-core/src/preload.js +++ b/packages/ipfs-core/src/preload.js @@ -1,5 +1,6 @@ 'use strict' +// @ts-ignore const toUri = require('multiaddr-to-uri') const debug = require('debug') const CID = require('cids') @@ -16,7 +17,7 @@ const log = Object.assign( ) /** - * @param {Options & AbortOptions} [options] + * @param {import('./types').PreloadOptions} [options] */ const createPreloader = (options = {}) => { options.enabled = Boolean(options.enabled) @@ -33,6 +34,7 @@ const createPreloader = (options = {}) => { } let stopped = true + /** @type {AbortController[]} */ let requests = [] const apiUris = options.addresses.map(toUri) @@ -40,8 +42,7 @@ const createPreloader = (options = {}) => { const cache = hashlru(options.cache) /** - * @param {string|CID} path - * @returns {Promise} + * @type {import('./types').Preload} */ const api = async path => { try { @@ -65,6 +66,7 @@ const createPreloader = (options = {}) => { for (const uri of fallbackApiUris) { if 
(stopped) throw new Error(`preload aborted for ${path}`) + /** @type {AbortController} */ let controller try { @@ -108,15 +110,3 @@ const createPreloader = (options = {}) => { } module.exports = createPreloader - -/** - * @typedef {ReturnType} Preload - * - * @typedef {object} Options - * @property {boolean} [enabled = false] - Whether to preload anything - * @property {number} [cache = 1000] - How many CIDs to cache - * @property {string[]} [addresses = []] - Which preload servers to use. - * **NOTE:** nodes specified here should also be added to your node's bootstrap address list at `config.Boostrap`. - * - * @typedef {import('./components').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/src/runtime/dns-browser.js b/packages/ipfs-core/src/runtime/dns-browser.js index 3d2fab2bd4..c66e3934dc 100644 --- a/packages/ipfs-core/src/runtime/dns-browser.js +++ b/packages/ipfs-core/src/runtime/dns-browser.js @@ -16,13 +16,26 @@ const ttl = 60 * 1000 // we don't want preload calls to exhaust the limit (~6) const httpQueue = new PQueue({ concurrency: 4 }) +/** + * @param {{ Path: string, Message: string }} response + */ const ipfsPath = (response) => { if (response.Path) return response.Path throw new Error(response.Message) } +/** + * @param {string} fqdn + * @param {object} opts + */ module.exports = async (fqdn, opts) => { // eslint-disable-line require-await + /** + * @param {string} fqdn + * @param {object} opts + * @param {boolean} [opts.nocache] + */ const resolveDnslink = async (fqdn, opts = {}) => { + // @ts-ignore - URLSearchParams does not take boolean options, only strings const searchParams = new URLSearchParams(opts) searchParams.set('arg', fqdn) diff --git a/packages/ipfs-core/src/runtime/dns-nodejs.js b/packages/ipfs-core/src/runtime/dns-nodejs.js index 3b75c3b2de..68861c8ac7 100644 --- a/packages/ipfs-core/src/runtime/dns-nodejs.js +++ b/packages/ipfs-core/src/runtime/dns-nodejs.js @@ -7,6 +7,11 @@ const { promisify } = require('util') const 
MAX_RECURSIVE_DEPTH = 32 +/** + * @param {string} domain + * @param {object} opts + * @param {boolean} [opts.recursive] + */ module.exports = (domain, opts) => { // recursive is true by default, it's set to false only if explicitly passed as argument in opts const recursive = opts.recursive == null ? true : Boolean(opts.recursive) @@ -19,7 +24,12 @@ module.exports = (domain, opts) => { return recursiveResolveDnslink(domain, depth) } -async function recursiveResolveDnslink (domain, depth) { +/** + * @param {string} domain + * @param {number} [depth=0] + * @returns {Promise} + */ +async function recursiveResolveDnslink (domain, depth = 0) { if (depth === 0) { throw errcode(new Error('recursion limit exceeded'), 'ERR_DNSLINK_RECURSION_LIMIT') } @@ -57,6 +67,9 @@ async function recursiveResolveDnslink (domain, depth) { return recursiveResolveDnslink(domainOrCID, depth - 1) } +/** + * @param {string} domain + */ async function resolveDnslink (domain) { const DNSLINK_REGEX = /^dnslink=.+$/ const records = await promisify(dns.resolveTxt)(domain) diff --git a/packages/ipfs-core/src/runtime/init-assets-nodejs.js b/packages/ipfs-core/src/runtime/init-assets-nodejs.js index 356ef6f5e6..cf6b1c8b48 100644 --- a/packages/ipfs-core/src/runtime/init-assets-nodejs.js +++ b/packages/ipfs-core/src/runtime/init-assets-nodejs.js @@ -4,12 +4,23 @@ const path = require('path') const globSource = require('ipfs-utils/src/files/glob-source') const all = require('it-all') -// Add the default assets to the repo. +/** + * Add the default assets to the repo. 
+ * + * @param {object} arg + * @param {import('ipfs-core-types/src/root').API["addAll"]} arg.addAll + * @param {(msg: string) => void} arg.print + */ module.exports = async function initAssets ({ addAll, print }) { const initDocsPath = path.join(__dirname, '..', 'init-files', 'init-docs') const results = await all(addAll(globSource(initDocsPath, { recursive: true }), { preload: false })) + const dir = results.filter(file => file.path === 'init-docs').pop() + if (!dir) { + return + } + print('to get started, enter:\n') print(`\tjsipfs cat /ipfs/${dir.cid}/readme\n`) } diff --git a/packages/ipfs-core/src/runtime/ipld.js b/packages/ipfs-core/src/runtime/ipld.js index fd942f888c..61e51d6977 100644 --- a/packages/ipfs-core/src/runtime/ipld.js +++ b/packages/ipfs-core/src/runtime/ipld.js @@ -3,7 +3,15 @@ const mergeOptions = require('merge-options') const multicodec = require('multicodec') -// All known (non-default) IPLD formats +/** + * @typedef {import('interface-ipld-format').Format} IPLDFormat + */ + +/** + * All known (non-default) IPLD formats + * + * @type {Record} + */ const IpldFormats = { get [multicodec.DAG_PB] () { return require('ipld-dag-pb') @@ -16,16 +24,23 @@ const IpldFormats = { } } -module.exports = (blockService, options = {}, log) => { +/** + * @param {import('ipfs-block-service')} blockService + * @param {import('ipld').Options} [options] + */ +module.exports = (blockService, options) => { return mergeOptions.call( // ensure we have the defaults formats even if the user overrides `formats: []` { concatArrays: true }, { blockService: blockService, + formats: [], + /** + * @type {import('ipld').LoadFormatFn} + */ loadFormat: (codec) => { - log('Loading IPLD format', codec) if (IpldFormats[codec]) { - return IpldFormats[codec] + return Promise.resolve(IpldFormats[codec]) } else { throw new Error(`Missing IPLD format "${multicodec.getName(codec)}"`) } diff --git a/packages/ipfs-core/src/runtime/libp2p-browser.js 
b/packages/ipfs-core/src/runtime/libp2p-browser.js index b7c9680c4f..48d4cab843 100644 --- a/packages/ipfs-core/src/runtime/libp2p-browser.js +++ b/packages/ipfs-core/src/runtime/libp2p-browser.js @@ -1,7 +1,10 @@ 'use strict' +// @ts-ignore - no types const WS = require('libp2p-websockets') +// @ts-ignore - no types const WebRTCStar = require('libp2p-webrtc-star') +// @ts-ignore - no types const Multiplex = require('libp2p-mplex') const { NOISE } = require('libp2p-noise') const KadDHT = require('libp2p-kad-dht') diff --git a/packages/ipfs-core/src/runtime/libp2p-nodejs.js b/packages/ipfs-core/src/runtime/libp2p-nodejs.js index cb72c3cfe8..6336d2455b 100644 --- a/packages/ipfs-core/src/runtime/libp2p-nodejs.js +++ b/packages/ipfs-core/src/runtime/libp2p-nodejs.js @@ -1,10 +1,14 @@ 'use strict' +// @ts-ignore - no types const TCP = require('libp2p-tcp') +// @ts-ignore - no types const MulticastDNS = require('libp2p-mdns') +// @ts-ignore - no types const WS = require('libp2p-websockets') const KadDHT = require('libp2p-kad-dht') const GossipSub = require('libp2p-gossipsub') +// @ts-ignore - no types const Multiplex = require('libp2p-mplex') const { NOISE } = require('libp2p-noise') const ipnsUtils = require('../ipns/routing/utils') diff --git a/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js b/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js index 0af111e881..cf329268b7 100644 --- a/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js +++ b/packages/ipfs-core/src/runtime/libp2p-pubsub-routers-nodejs.js @@ -2,5 +2,6 @@ module.exports = { gossipsub: require('libp2p-gossipsub'), + // @ts-ignore - no types floodsub: require('libp2p-floodsub') } diff --git a/packages/ipfs-core/src/runtime/preload-browser.js b/packages/ipfs-core/src/runtime/preload-browser.js index 5005982b62..a7f8fc5091 100644 --- a/packages/ipfs-core/src/runtime/preload-browser.js +++ b/packages/ipfs-core/src/runtime/preload-browser.js @@ -13,6 +13,10 @@ const log = 
Object.assign(debug('ipfs:preload'), { // we don't want preload calls to exhaust the limit (~6) const httpQueue = new PQueue({ concurrency: 4 }) +/** + * @param {string} url + * @param {import('ipfs-core-types/src/basic').AbortOptions} options + */ module.exports = function preload (url, options = {}) { log(url) diff --git a/packages/ipfs-core/src/runtime/preload-nodejs.js b/packages/ipfs-core/src/runtime/preload-nodejs.js index 0cf65212c8..94621a8829 100644 --- a/packages/ipfs-core/src/runtime/preload-nodejs.js +++ b/packages/ipfs-core/src/runtime/preload-nodejs.js @@ -7,6 +7,10 @@ const log = Object.assign(debug('ipfs:preload'), { error: debug('ipfs:preload:error') }) +/** + * @param {string} url + * @param {import('ipfs-core-types/src/basic').AbortOptions} options + */ module.exports = async function preload (url, options = {}) { log(url) diff --git a/packages/ipfs-core/src/runtime/repo-browser.js b/packages/ipfs-core/src/runtime/repo-browser.js index ad13c85d93..576257a65f 100644 --- a/packages/ipfs-core/src/runtime/repo-browser.js +++ b/packages/ipfs-core/src/runtime/repo-browser.js @@ -2,7 +2,13 @@ const IPFSRepo = require('ipfs-repo') -module.exports = (options = {}) => { +/** + * @param {import('../types').Print} print + * @param {object} options + * @param {string} [options.path] + * @param {boolean} options.autoMigrate + */ +module.exports = (print, options) => { const repoPath = options.path || 'ipfs' return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/packages/ipfs-core/src/runtime/repo-nodejs.js b/packages/ipfs-core/src/runtime/repo-nodejs.js index 78bae2d565..f37f3772e9 100644 --- a/packages/ipfs-core/src/runtime/repo-nodejs.js +++ b/packages/ipfs-core/src/runtime/repo-nodejs.js @@ -5,33 +5,35 @@ const IPFSRepo = require('ipfs-repo') const path = require('path') /** - * @param {Object} [options] + * @param {import('../types').Print} print + * @param {object} options * @param {string} [options.path] - * @param {boolean} 
[options.silent] - * @param {boolean} [options.autoMigrate] - * @returns {Repo} + * @param {boolean} options.autoMigrate */ -module.exports = (options = {}) => { +module.exports = (print, options) => { const repoPath = options.path || path.join(os.homedir(), '.jsipfs') - let lastMigration = null + /** + * @type {number} + */ + let lastMigration + /** + * @param {number} version + * @param {string} percentComplete + * @param {string} message + */ const onMigrationProgress = (version, percentComplete, message) => { if (version !== lastMigration) { lastMigration = version - console.info(`Migrating repo from v${version - 1} to v${version}`) // eslint-disable-line no-console + print(`Migrating repo from v${version - 1} to v${version}`) } - console.info(`${percentComplete.toString().padStart(6, ' ')}% ${message}`) // eslint-disable-line no-console + print(`${percentComplete.toString().padStart(6, ' ')}% ${message}`) } return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate, - onMigrationProgress: options.silent ? 
null : onMigrationProgress + onMigrationProgress: onMigrationProgress }) } - -/** - * @typedef {import('ipfs-core-types/src/repo').Repo} Repo - * @typedef {import('../components/config').IPFSConfig} IPFSConfig - */ diff --git a/packages/ipfs-core/src/types.d.ts b/packages/ipfs-core/src/types.d.ts new file mode 100644 index 0000000000..db57c13c43 --- /dev/null +++ b/packages/ipfs-core/src/types.d.ts @@ -0,0 +1,229 @@ +import type { KeyType } from 'libp2p-crypto' +import type PeerId from 'peer-id' +import type { IPFSConfig } from 'ipfs-core-types/src/config' +import type { ProfileNames } from 'ipfs-core-types/src/config/profiles' +import type IPLD from 'ipld' +import type { Options as IPLDOptions } from 'ipld' +import type Libp2p from 'libp2p' +import type { Libp2pConfig as Libp2pOptions } from 'libp2p' +import type IPFSRepo from 'ipfs-repo' +import type { ProgressCallback as MigrationProgressCallback } from 'ipfs-repo-migrations' +import type { Datastore } from 'interface-datastore' +import type Network, { Options as NetworkOptions } from './components/network' + +export interface Options { + /** + * Initialization options of the IPFS node. + * Note that *initializing* a repo is different from creating an instance of + * [`ipfs-repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor + * sets many special properties when initializing a repo, so you should usually + * not try and call `repoInstance.init()` yourself. + */ + init?: InitOptions + + /** + * If `false`, do not automatically start the IPFS node. Instead, you’ll need to manually call + * [`node.start()`](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs/docs/MODULE.md#nodestart) + * yourself. 
+ */ + start?: boolean + + /** + * A passphrase to encrypt/decrypt keys stored in your keychain + */ + pass?: string + + /** + * Configure circuit relay (see the [circuit relay tutorial](https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) + * to learn more) + */ + relay?: RelayOptions + + /** + * Run ipfs node offline. The node does not connect to the rest of the network + * but APIs that do not require network access will still work. + */ + offline?: boolean + + /** + * Configure remote preload nodes. The remote will preload content added on this node, + * and also attempt to preload objects requested by this node. + */ + preload?: PreloadOptions + + /** + * Enable and configure experimental features + */ + EXPERIMENTAL?: ExperimentalOptions + + /** + * Modify the default IPFS node config. This object will be *merged* with the default config; it will not replace it. + * (Default: [`config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-nodejs.js) + * in Node.js, [`config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-browser.js) + * in browsers) + */ + config?: IPFSConfig + + /** + * If multiple instances of IPFS are accessing the same repo - e.g. via node cluster or browser UI and webworkers + * one instance must be designated the repo owner to hold the lock on shared resources like the datastore. + * + * Set this property to true on one instance only if this is how your application is set up. + */ + repoOwner?: boolean + + /** + * The file path at which to store the IPFS node’s data. Alternatively, you can set up a customized + * storage system by providing an Repo implementation. (In browser default is 'ipfs'). 
+ */ + repo?: IPFSRepo | string + + /** + * Occasionally a repo migration is necessary - pass true here to to this automatically at startup + * when a new version of IPFS is being run for the first time and a migration is necssary, otherwise + * the node will refuse to start + */ + repoAutoMigrate?: boolean + + /** + * Pass a function here to be notified of progress when a repo migration is taking place + */ + onMigrationProgress?: MigrationProgressCallback + + /** + * Modify the default IPLD config. This object + * will be *merged* with the default config; it will not replace it. Check IPLD + * [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information + * on the available options. (Default: [`ipld.js`] + * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) + * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld.js) + * in browsers) + */ + ipld?: IPLDOptions + + /** + * The libp2p option allows you to build + * your libp2p node by configuration, or via a bundle function. If you are + * looking to just modify the below options, using the object format is the + * quickest way to get the default features of libp2p. If you need to create a + * more customized libp2p node, such as with custom transports or peer/content + * routers that need some of the ipfs data on startup, a custom bundle is a + * great way to achieve this. + * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). + * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) + * for the list of options libp2p supports. 
+ * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) + * in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in + * browsers. + */ + libp2p?: Libp2pOptions | Libp2pFactoryFn + + silent?: boolean +} + +export type Libp2pFactoryFn = ({ libp2pOptions: Libp2pOptions, options: Options, config: IPFSConfig, datastore: Datastore, peerId: PeerId }) => Libp2p + +/** + * On first run js-IPFS will initialize a repo which can be customized through this settings + */ +export interface InitOptions { + /** + * Whether to remove built-in assets, like the instructional tour and empty mutable file system, from the repo + */ + emptyRepo?: boolean + + /** + * The type of key to use + */ + algorithm?: KeyType + + /** + * Number of bits to use in the generated key pair (rsa only) + */ + bits?: number + + /** + * A pre-generated private key to use + * **NOTE: This overrides `bits`.** + */ + privateKey?: PeerId|string + + /** + * Apply profile settings to config + */ + profiles?: ProfileNames[] + + /** + * Set to `false` to disallow initialization if the repo does not already exist + */ + allowNew?: boolean +} + +export interface RelayOptions { + /** + * Enable circuit relay dialer and listener. (Default: `true`) + */ + enabled?: boolean + + hop?: { + /** + * Make this node a relay (other nodes can connect *through* it). (Default: `false`) + */ + enabled?: boolean + + /** + * Make this an active relay node. Active relay nodes will attempt to dial a destination peer even if that peer is not yet connected to the relay. (Default: false) + */ + active?: boolean + } +} + +export interface PreloadOptions { + /** + * Whether to preload anything + */ + enabled?: boolean + + /** + * How many CIDs to cache + */ + cache?: number + + /** + * Which preload servers to use. 
**NOTE:** nodes specified here should also be added to your node's + * bootstrap address list at `config.Boostrap` + */ + addresses?: string[] +} + +export interface ExperimentalOptions { + /** + * Enable pub-sub on IPNS. (Default: `false`) + */ + ipnsPubsub?: boolean + + /** + * Enable directory sharding. Directories that have many child objects will be represented by multiple + * DAG nodes instead of just one. It can improve lookup performance when a directory has several + * thousand files or more. (Default: `false`) + */ + sharding?: boolean +} + +/** + * Prints output to the console + */ +export type Print = (...args:any[]) => void + +export interface Preload { + (cid: CID): void + start: () => void + stop: () => void +} + +export interface MfsPreload { + start: () => void + stop: () => void +} + +export type NetworkService = Service diff --git a/packages/ipfs-core/src/utils.js b/packages/ipfs-core/src/utils.js index c32e3abaf0..c5e90b240c 100644 --- a/packages/ipfs-core/src/utils.js +++ b/packages/ipfs-core/src/utils.js @@ -9,6 +9,11 @@ const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') const withTimeoutOption = require('ipfs-core-utils/src/with-timeout-option') /** @type {typeof Object.assign} */ const mergeOptions = require('merge-options') +const last = require('it-last') + +/** + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ exports.mergeOptions = mergeOptions @@ -38,6 +43,7 @@ const normalizePath = (pathStr) => { } // TODO: do we need both normalizePath and normalizeCidPath? 
+// TODO: don't forget ipfs-core-utils/src/to-cid-and-path /** * @param {Uint8Array|CID|string} path * @returns {string} @@ -68,12 +74,12 @@ const normalizeCidPath = (path) => { * - /ipfs//link/to/pluto * - multihash Buffer * - * @param {import('./components').DagReader} dag + * @param {import('ipld')} ipld * @param {CID | string} ipfsPath - A CID or IPFS path * @param {Object} [options] - Optional options passed directly to dag.resolve * @returns {Promise} */ -const resolvePath = async function (dag, ipfsPath, options = {}) { +const resolvePath = async function (ipld, ipfsPath, options = {}) { if (isIpfs.cid(ipfsPath)) { // @ts-ignore - CID|string seems to confuse typedef return new CID(ipfsPath) @@ -88,27 +94,24 @@ const resolvePath = async function (dag, ipfsPath, options = {}) { return cid } - const result = await dag.resolve(cid, { - ...options, - path - }) + const result = await last(ipld.resolve(cid, path, options)) + + if (!result) { + throw new Error('Not found') + } return result.cid } /** - * @typedef {import('ipfs-core-types/src/files').InputFile} InputFile - * @typedef {import('ipfs-core-types/src/files').UnixFSFile} UnixFSFile - * @typedef {import('ipfs-core-types/src/files').IPFSEntry} IPFSEntry - * @typedef {import('ipfs-core-types/src').AbortOptions} AbortOptions + * @typedef {import('ipfs-unixfs-exporter').UnixFSEntry} UnixFSEntry * - * @param {InputFile|UnixFSFile} file + * @param {UnixFSEntry} file * @param {Object} [options] * @param {boolean} [options.includeContent] - * @returns {IPFSEntry} */ const mapFile = (file, options = {}) => { - /** @type {IPFSEntry} */ + /** @type {import('ipfs-core-types/src/root').IPFSEntry} */ const output = { cid: file.cid, path: file.path, @@ -118,11 +121,11 @@ const mapFile = (file, options = {}) => { type: 'file' } - if (file.unixfs) { + if (file.type === 'file' || file.type === 'directory') { // @ts-ignore - TS type can't be changed from File to Directory - output.type = file.unixfs.type === 'directory' ? 
'dir' : 'file' + output.type = file.type === 'directory' ? 'dir' : 'file' - if (file.unixfs.type === 'file') { + if (file.type === 'file') { output.size = file.unixfs.fileSize() if (options.includeContent) { @@ -132,7 +135,10 @@ const mapFile = (file, options = {}) => { } output.mode = file.unixfs.mode - output.mtime = file.unixfs.mtime + + if (file.unixfs.mtime !== undefined) { + output.mtime = file.unixfs.mtime + } } return output diff --git a/packages/ipfs-core/src/utils/service.js b/packages/ipfs-core/src/utils/service.js index 33473d9e2e..6a8f55b45d 100644 --- a/packages/ipfs-core/src/utils/service.js +++ b/packages/ipfs-core/src/utils/service.js @@ -3,6 +3,35 @@ const { NotStartedError, AlreadyStartingError, AlreadyStartedError } = require('../errors') const { withTimeout } = require('../utils') +/** + * @template T + * @typedef {import('ipfs-core-types/src/basic').Await} Await + */ +/** + * @template {(options:any) => any} T + * @typedef {Parameters[0]} Options + */ +/** + * @template {(options:any) => any} T + * @typedef {ReturnType extends ? Promise ? U : ReturnType} State + */ +/** + * Represents service state which can be not started in which case + * it is instance of `Error`. Pending in which case it's promise or + * ready in which case it is the value itself. + * + * @template T + * @typedef {{ status: 'stopped' } + * | { status: 'starting', ready: Await } + * | { status: 'started', value: T } + * | { status: 'stopping', ready: Await } + * } ServiceState + */ + +/** + * @typedef {import('ipfs-core-types/src/basic').AbortOptions} AbortOptions + */ + /** * @template Options, T * @@ -209,31 +238,3 @@ class Service { } } module.exports = Service - -/** - * @template T - * @typedef {import('ipfs-core-types/src/basic').Await} Await - */ -/** - * @template {(options:any) => any} T - * @typedef {Parameters[0]} Options - */ -/** - * @template {(options:any) => any} T - * @typedef {ReturnType extends ? Promise ? 
U : ReturnType} State - */ -/** - * Represents service state which can be not started in which case - * it is instanceof `Error`. Pending in which case it's promise or - * ready in which case it is the value itself. - * - * @template T - * @typedef {{ status: 'stopped' } - * | { status: 'starting', ready: Await } - * | { status: 'started', value: T } - * | { status: 'stopping', ready: Await } - * } ServiceState - */ -/** - * @typedef {import('../utils').AbortOptions} AbortOptions - */ diff --git a/packages/ipfs-core/tsconfig.json b/packages/ipfs-core/tsconfig.json index 6264673867..644f3a5ef7 100644 --- a/packages/ipfs-core/tsconfig.json +++ b/packages/ipfs-core/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-daemon/tsconfig.json b/packages/ipfs-daemon/tsconfig.json index 2111e28905..f7df24adbc 100644 --- a/packages/ipfs-daemon/tsconfig.json +++ b/packages/ipfs-daemon/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-grpc-client/package.json b/packages/ipfs-grpc-client/package.json index 20ccec3225..b5bc022623 100644 --- a/packages/ipfs-grpc-client/package.json +++ b/packages/ipfs-grpc-client/package.json @@ -30,7 +30,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "npm run build", "build": "aegir build", "test": "aegir test", "lint": "aegir lint", @@ -43,14 +42,14 @@ "change-case": "^4.1.1", "cids": "^1.1.5", "debug": "^4.1.1", - "err-code": "^2.0.3", + "err-code": "^3.0.1", "ipfs-core-utils": "^0.7.2", "ipfs-grpc-protocol": "^0.2.0", "it-first": "^1.0.4", 
"it-pushable": "^1.4.0", "multiaddr": "^8.0.0", "protobufjs": "^6.10.2", - "wherearewe": "0.0.1", + "wherearewe": "1.0.0", "ws": "^7.3.1" }, "devDependencies": { diff --git a/packages/ipfs-grpc-client/tsconfig.json b/packages/ipfs-grpc-client/tsconfig.json index a42a1aac5d..37871ad4bb 100644 --- a/packages/ipfs-grpc-client/tsconfig.json +++ b/packages/ipfs-grpc-client/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-grpc-server/tsconfig.json b/packages/ipfs-grpc-server/tsconfig.json index 979a39adab..8a6208361c 100644 --- a/packages/ipfs-grpc-server/tsconfig.json +++ b/packages/ipfs-grpc-server/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index dd2ad3bac4..7043938912 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -35,7 +35,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "npm run build", "build": "aegir build", "test": "aegir test", "test:node": "aegir test -t node", @@ -56,14 +55,14 @@ "bignumber.js": "^9.0.1", "cids": "^1.1.5", "debug": "^4.1.1", - "form-data": "^3.0.0", + "form-data": "^4.0.0", "ipfs-core-types": "^0.3.1", "ipfs-core-utils": "^0.7.2", "ipfs-utils": "^6.0.1", "ipld-block": "^0.11.0", - "ipld-dag-cbor": "^0.17.0", - "ipld-dag-pb": "^0.20.0", - "ipld-raw": "^6.0.0", + "ipld-dag-cbor": "^0.18.0", + "ipld-dag-pb": "^0.22.0", + "ipld-raw": "^7.0.0", "it-last": "^1.0.4", "it-map": "^1.0.4", "it-tar": "^1.2.2", @@ -81,7 +80,7 @@ 
}, "devDependencies": { "aegir": "^31.0.0", - "delay": "^4.4.0", + "delay": "^5.0.0", "go-ipfs": "0.8.0", "ipfs-core": "^0.5.4", "ipfsd-ctl": "^7.2.0", diff --git a/packages/ipfs-http-client/src/object/put.js b/packages/ipfs-http-client/src/object/put.js index abf6b203ff..31b0216c8c 100644 --- a/packages/ipfs-http-client/src/object/put.js +++ b/packages/ipfs-http-client/src/object/put.js @@ -13,6 +13,7 @@ const uint8ArrayFromString = require('uint8arrays/from-string') module.exports = configure(api => { return async (obj, options = {}) => { let tmpObj = { + /** @type {{ Name: string, Hash: string, Size: number }[]} */ Links: [] } @@ -23,7 +24,7 @@ module.exports = configure(api => { Links: [] } } - } else if (DAGNode.isDAGNode(obj)) { + } else if (obj instanceof DAGNode) { tmpObj = { Data: unit8ArrayToString(obj.Data), Links: obj.Links.map(l => ({ diff --git a/packages/ipfs-http-client/tsconfig.json b/packages/ipfs-http-client/tsconfig.json index bbdcd5851e..37871ad4bb 100644 --- a/packages/ipfs-http-client/tsconfig.json +++ b/packages/ipfs-http-client/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", @@ -9,10 +10,10 @@ ], "references": [ { - "path": "../ipfs-core-utils" + "path": "../ipfs-core" }, { - "path": "../ipfs-core" + "path": "../ipfs-core-utils" } ] } diff --git a/packages/ipfs-http-gateway/package.json b/packages/ipfs-http-gateway/package.json index d85a8bb5bc..772f8ec12c 100644 --- a/packages/ipfs-http-gateway/package.json +++ b/packages/ipfs-http-gateway/package.json @@ -35,7 +35,7 @@ "hapi-pino": "^8.3.0", "ipfs-core-utils": "^0.7.2", "ipfs-http-response": "^0.6.0", - "is-ipfs": "^2.0.0", + "is-ipfs": "^4.0.0", "it-last": "^1.0.4", "it-to-stream": "^0.1.2", "joi": "^17.2.1", diff --git a/packages/ipfs-http-gateway/tsconfig.json 
b/packages/ipfs-http-gateway/tsconfig.json index bbdcd5851e..37871ad4bb 100644 --- a/packages/ipfs-http-gateway/tsconfig.json +++ b/packages/ipfs-http-gateway/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", @@ -9,10 +10,10 @@ ], "references": [ { - "path": "../ipfs-core-utils" + "path": "../ipfs-core" }, { - "path": "../ipfs-core" + "path": "../ipfs-core-utils" } ] } diff --git a/packages/ipfs-http-server/package.json b/packages/ipfs-http-server/package.json index b5a11e8171..ba8e86c24b 100644 --- a/packages/ipfs-http-server/package.json +++ b/packages/ipfs-http-server/package.json @@ -34,12 +34,12 @@ "cids": "^1.1.5", "debug": "^4.1.1", "dlv": "^1.1.3", - "err-code": "^2.0.3", + "err-code": "^3.0.1", "hapi-pino": "^8.3.0", "ipfs-core-utils": "^0.7.2", "ipfs-http-gateway": "^0.3.2", - "ipfs-unixfs": "^2.0.3", - "ipld-dag-pb": "^0.20.0", + "ipfs-unixfs": "https://gitpkg.now.sh/ipfs/js-ipfs-unixfs/packages/ipfs-unixfs?fix/declare-interface-types-in-d-ts", + "ipld-dag-pb": "^0.22.1", "it-all": "^1.0.4", "it-drain": "^1.0.3", "it-first": "^1.0.4", @@ -65,7 +65,7 @@ }, "devDependencies": { "aegir": "^31.0.0", - "form-data": "^3.0.0", + "form-data": "^4.0.0", "ipfs-http-client": "^49.0.4", "iso-random-stream": "^1.1.1", "it-to-buffer": "^1.0.2", diff --git a/packages/ipfs-http-server/test/inject/object.js b/packages/ipfs-http-server/test/inject/object.js index b5911322ee..f69de2ca90 100644 --- a/packages/ipfs-http-server/test/inject/object.js +++ b/packages/ipfs-http-server/test/inject/object.js @@ -11,7 +11,7 @@ const testHttpMethod = require('../utils/test-http-method') const http = require('../utils/http') const sinon = require('sinon') const CID = require('cids') -const UnixFS = require('ipfs-unixfs') +const { UnixFS } = require('ipfs-unixfs') const { 
AbortSignal } = require('native-abort-controller') const { DAGNode, @@ -555,7 +555,7 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.property('result', emptyDirectoryNode.Data) + expect(res).to.have.deep.property('rawPayload', emptyDirectoryNode.Data) }) it('accepts a timeout', async () => { @@ -570,7 +570,7 @@ describe('/object', () => { }, { ipfs }) expect(res).to.have.property('statusCode', 200) - expect(res).to.have.property('result', emptyDirectoryNode.Data) + expect(res).to.have.deep.property('rawPayload', emptyDirectoryNode.Data) }) }) diff --git a/packages/ipfs-http-server/tsconfig.json b/packages/ipfs-http-server/tsconfig.json index 2e66dcb8a7..c56d0d26c5 100644 --- a/packages/ipfs-http-server/tsconfig.json +++ b/packages/ipfs-http-server/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-message-port-client/package.json b/packages/ipfs-message-port-client/package.json index 906a189b74..692edb74d4 100644 --- a/packages/ipfs-message-port-client/package.json +++ b/packages/ipfs-message-port-client/package.json @@ -30,7 +30,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "npm run build", "build": "aegir build", "test": "echo 'Only interface tests live here'", "test:interface:message-port-client": "aegir test -t browser --bail -f ./test/interface-message-port-client.js", diff --git a/packages/ipfs-message-port-client/tsconfig.json b/packages/ipfs-message-port-client/tsconfig.json index ad2457a225..a3988c1e5d 100644 --- a/packages/ipfs-message-port-client/tsconfig.json +++ b/packages/ipfs-message-port-client/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": 
"../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-message-port-protocol/package.json b/packages/ipfs-message-port-protocol/package.json index f456322d21..59e9c5c318 100644 --- a/packages/ipfs-message-port-protocol/package.json +++ b/packages/ipfs-message-port-protocol/package.json @@ -40,6 +40,7 @@ }, "dependencies": { "cids": "^1.1.5", + "ipfs-core-types": "^0.3.1", "ipld-block": "^0.11.0" }, "devDependencies": { diff --git a/packages/ipfs-message-port-protocol/tsconfig.json b/packages/ipfs-message-port-protocol/tsconfig.json index 979a39adab..8a6208361c 100644 --- a/packages/ipfs-message-port-protocol/tsconfig.json +++ b/packages/ipfs-message-port-protocol/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs-message-port-server/package.json b/packages/ipfs-message-port-server/package.json index 6e97e9c54d..c01f5e7959 100644 --- a/packages/ipfs-message-port-server/package.json +++ b/packages/ipfs-message-port-server/package.json @@ -32,7 +32,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "npm run build", "build": "aegir build", "test": "aegir test", "test:browser": "aegir test -t browser", @@ -45,6 +44,7 @@ "dep-check": "aegir dep-check -i rimraf" }, "dependencies": { + "ipfs-core-types": "^0.3.1", "ipfs-message-port-protocol": "^0.6.1", "it-all": "^1.0.4" }, diff --git a/packages/ipfs-message-port-server/src/block.js b/packages/ipfs-message-port-server/src/block.js index 0e19ec581a..295963cd7d 100644 --- a/packages/ipfs-message-port-server/src/block.js +++ b/packages/ipfs-message-port-server/src/block.js @@ -9,7 +9,7 @@ const { } = 
require('ipfs-message-port-protocol/src/block') /** - * @typedef {import('./ipfs').IPFS} IPFS + * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {import('cids')} CID * @typedef {import('ipfs-message-port-protocol/src/error').EncodedError} EncodedError * @typedef {import('ipfs-message-port-protocol/src/block').Block} Block @@ -17,6 +17,7 @@ const { * @typedef {import('ipfs-message-port-protocol/src/block').EncodedBlock} EncodedBlock * @typedef {RmEntry} Rm * @typedef {StatResult} Stat + * @typedef {import('ipfs-core-types/src/block').PutOptions} PutOptions */ exports.BlockService = class BlockService { @@ -56,17 +57,10 @@ exports.BlockService = class BlockService { * @typedef {Object} PutQuery * @property {EncodedBlock|Uint8Array} block * @property {EncodedCID|undefined} [cid] - * @property {string} [format] - * @property {string} [mhtype] - * @property {number} [mhlen] - * @property {number} [version] - * @property {boolean} [pin] - * @property {number} [timeout] - * @property {AbortSignal} [signal] * * Stores input as an IPFS block. 
* - * @param {PutQuery} query + * @param {PutOptions & PutQuery} query * @returns {Promise} */ async put (query) { diff --git a/packages/ipfs-message-port-server/src/core.js b/packages/ipfs-message-port-server/src/core.js index dea2eb81a1..9b3eec9bcf 100644 --- a/packages/ipfs-message-port-server/src/core.js +++ b/packages/ipfs-message-port-server/src/core.js @@ -10,7 +10,10 @@ const { const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') /** - * @typedef {import("./ipfs").IPFS} IPFS + * @typedef {import('cids').CIDVersion} CIDVersion + * @typedef {import('ipfs-core-types').IPFS} IPFS + * @typedef {import('ipfs-core-types/src/root').AddOptions} AddOptions + * @typedef {import('ipfs-core-types/src/root').AddAllOptions} AddAllOptions * @typedef {import("ipfs-message-port-protocol/src/data").Time} Time * @typedef {import("ipfs-message-port-protocol/src/data").UnixFSTime} UnixFSTime * @typedef {import("ipfs-message-port-protocol/src/data").Mode} Mode @@ -22,6 +25,8 @@ const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') * @typedef {import('./ipfs').FileContent} DecodedFileContent * @typedef {import('./ipfs').FileInput} DecodedFileInput * @typedef {import('./ipfs').LsEntry} LsEntry + * @typedef {import('ipfs-unixfs-importer').ImportResult} ImportResult + * @typedef {import('ipfs-unixfs-importer').ImportCandidate} ImportCandidate */ /** @@ -34,29 +39,16 @@ const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') */ /** - * @typedef {Object} AddOptions - * @property {string} [chunker] - * @property {number} [cidVersion] - * @property {boolean} [enableShardingExperiment] - * @property {HashAlg} [hashAlg] - * @property {boolean} [onlyHash] - * @property {boolean} [pin] - * @property {RemoteCallback|void} [progress] - * @property {boolean} [rawLeaves] - * @property {number} [shardSplitThreshold] - * @property {boolean} [trickle] - * @property {boolean} [wrapWithDirectory] - * @property {number} 
[timeout] - * @property {AbortSignal} [signal] - * * @typedef {Object} AddAllInput * @property {MultiFileInput} input + * @property {RemoteCallback} progress * * @typedef {Object} AddInput * @property {SingleFileInput} input + * @property {RemoteCallback} progress * * @typedef {AddInput & AddOptions} AddQuery - * @typedef {AddAllInput & AddOptions} AddAllQuery + * @typedef {AddAllInput & AddAllOptions} AddAllQuery * * @typedef {ArrayBuffer|ArrayBufferView|Blob|string|FileInput|RemoteIterable} SingleFileInput * @typedef {RemoteIterable} MultiFileInput @@ -133,9 +125,11 @@ exports.CoreService = class CoreService { if (progress) { const fn = decodeCallback(progress) - progressCallback = (bytes, fileName) => fn([bytes, fileName]) + /** @type {import('ipfs-core-types/src/root').AddProgressFn} */ + progressCallback = (bytes, fileName) => { fn([bytes, fileName]) } } + /** @type {AddAllOptions} */ const options = { chunker, cidVersion, @@ -169,13 +163,11 @@ exports.CoreService = class CoreService { const { chunker, cidVersion, - enableShardingExperiment, hashAlg, onlyHash, pin, progress, rawLeaves, - shardSplitThreshold, trickle, wrapWithDirectory, timeout, @@ -186,18 +178,18 @@ exports.CoreService = class CoreService { if (progress) { const fn = decodeCallback(progress) - progressCallback = (bytes, fileName) => fn([bytes, fileName]) + /** @type {import('ipfs-core-types/src/root').AddProgressFn} */ + progressCallback = (bytes, fileName) => { fn([bytes, fileName]) } } + /** @type {AddOptions} */ const options = { chunker, cidVersion, - enableShardingExperiment, hashAlg, onlyHash, pin, rawLeaves, - shardSplitThreshold, trickle, wrapWithDirectory, timeout, @@ -319,7 +311,7 @@ const matchInput = (input, decode) => { } /** - * @param {AsyncIterable} out + * @param {AsyncIterable} out * @returns {AddAllResult} */ const encodeAddAllResult = out => { @@ -332,7 +324,7 @@ const encodeAddAllResult = out => { } /** - * @param {FileOutput} out + * @param {ImportResult} out * @returns 
{AddResult} */ const encodeAddResult = out => { diff --git a/packages/ipfs-message-port-server/src/dag.js b/packages/ipfs-message-port-server/src/dag.js index 386af65b26..bbf0a41816 100644 --- a/packages/ipfs-message-port-server/src/dag.js +++ b/packages/ipfs-message-port-server/src/dag.js @@ -5,12 +5,12 @@ const { decodeNode, encodeNode } = require('ipfs-message-port-protocol/src/dag') const collect = require('it-all') /** - * @typedef {import('./ipfs').IPFS} IPFS + * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {import('ipfs-message-port-protocol/src/cid').CID} CID * @typedef {import('ipfs-message-port-protocol/src/cid').EncodedCID} EncodedCID * @typedef {import('ipfs-message-port-protocol/src/dag').DAGNode} DAGNode * @typedef {import('ipfs-message-port-protocol/src/dag').EncodedDAGNode} EncodedDAGNode - * + * @typedef {import('ipfs-core-types/src/dag').PutOptions} PutOptions * * @typedef {Object} DAGEntry * @property {DAGNode} value @@ -28,14 +28,9 @@ exports.DAGService = class DAGService { /** * @typedef {Object} PutDag * @property {EncodedDAGNode} dagNode - * @property {string} [hashAlg] - * @property {EncodedCID|void} [cid] - * @property {boolean} [pin] - * @property {boolean} [preload] - * @property {number} [timeout] - * @property {AbortSignal} [signal] + * @property {EncodedCID} [cid] * - * @param {PutDag} query + * @param {PutOptions & PutDag} query * @returns {Promise} */ async put (query) { @@ -51,7 +46,7 @@ exports.DAGService = class DAGService { /** * @typedef {Object} GetResult * @property {Transferable[]} transfer - * @property {string} remainderPath + * @property {string} [remainderPath] * @property {EncodedDAGNode} value * * @typedef {Object} GetDAG diff --git a/packages/ipfs-message-port-server/src/files.js b/packages/ipfs-message-port-server/src/files.js index f1084dcd5b..f3e1222cd5 100644 --- a/packages/ipfs-message-port-server/src/files.js +++ b/packages/ipfs-message-port-server/src/files.js @@ -10,8 +10,9 @@ const { encodeCID } = 
require('ipfs-message-port-protocol/src/cid') /** * @typedef {import('ipfs-message-port-protocol/src/data').HashAlg} HashAlg * @typedef {import('ipfs-message-port-protocol/src/data').Mode} Mode - * @typedef {import('./ipfs').IPFS} IPFS + * @typedef {import('ipfs-core-types').IPFS} IPFS * @typedef {Stat} EncodedStat + * @typedef {import('ipfs-core-types/src/mfs').StatOptions} StatOptions */ exports.FilesService = class FilesService { @@ -26,11 +27,6 @@ exports.FilesService = class FilesService { /** * @typedef {Object} StatQuery * @property {string} path - * @property {boolean} [hash=false] - * @property {boolean} [size=false] - * @property {boolean} [withLocal=false] - * @property {number} [timeout] - * @property {AbortSignal} [signal] * * @typedef {Object} Stat * @property {EncodedCID} cid @@ -46,7 +42,7 @@ exports.FilesService = class FilesService { * @property {Stat} stat * @property {Transferable[]} transfer * - * @param {StatQuery} input + * @param {StatOptions & StatQuery} input * @returns {Promise} */ async stat (input) { diff --git a/packages/ipfs-message-port-server/src/ipfs.ts b/packages/ipfs-message-port-server/src/ipfs.ts index 197e47b4ea..6f891dde2e 100644 --- a/packages/ipfs-message-port-server/src/ipfs.ts +++ b/packages/ipfs-message-port-server/src/ipfs.ts @@ -9,17 +9,12 @@ import { } from 'ipfs-message-port-protocol/src/data' import { ReadStream } from 'fs' import Block from 'ipld-block' +import { + AddAllOptions, + AddOptions +} from 'ipfs-core-types/src/root' type Mode = string | number -export interface IPFS extends Core { - dag: DAG - files: Files - block: BlockService -} - -export interface IPFSFactory { - create: () => Promise -} export interface AbortOptions { timeout?: number @@ -56,27 +51,12 @@ export interface DAG { } export interface Core { - addAll: (inputs: AddAllInput, options: AddOptions) => AsyncIterable + addAll: (inputs: AddAllInput, options: AddAllOptions) => AsyncIterable add: (input: AddInput, options: AddOptions) => Promise 
cat: (ipfsPath: CID | string, options: CatOptions) => AsyncIterable - ls: (ipfsPath: CID | string, options: CoreLsOptions) => AsyncIterable } -export interface AddOptions extends AbortOptions { - chunker?: string - cidVersion?: number - enableShardingExperiment?: boolean - hashAlg?: HashAlg - onlyHash?: boolean - pin?: boolean - progress?: (progress: number) => void - rawLeaves?: boolean - shardSplitThreshold?: number - trickle?: boolean - wrapWithDirectory?: boolean -} - export interface FileInput { path?: string content?: FileContent diff --git a/packages/ipfs-message-port-server/src/service.js b/packages/ipfs-message-port-server/src/service.js index 5c48f3836b..e56fb4038b 100644 --- a/packages/ipfs-message-port-server/src/service.js +++ b/packages/ipfs-message-port-server/src/service.js @@ -8,12 +8,11 @@ const { FilesService } = require('./files') const { BlockService } = require('./block') /** - * @typedef {import('./ipfs').IPFS} IPFS + * @typedef {import('ipfs-core-types').IPFS} IPFS */ exports.IPFSService = class IPFSService { /** - * * @param {IPFS} ipfs */ constructor (ipfs) { diff --git a/packages/ipfs-message-port-server/tsconfig.json b/packages/ipfs-message-port-server/tsconfig.json index a7d52a397b..a07616b332 100644 --- a/packages/ipfs-message-port-server/tsconfig.json +++ b/packages/ipfs-message-port-server/tsconfig.json @@ -1,7 +1,8 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ "src", diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index d45df36021..f43221f4a3 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -22,7 +22,6 @@ "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { - "prepare": "npm run build", "build": "aegir build", "lint": "aegir lint", "test": "echo 'Only interface tests live here'", @@ -40,7 +39,7 
@@ "debug": "^4.1.1", "ipfs-cli": "^0.4.4", "ipfs-core": "^0.5.4", - "ipfs-repo": "^8.0.0", + "ipfs-repo": "^9.0.0", "semver": "^7.3.2", "update-notifier": "^5.0.0" }, diff --git a/packages/ipfs/tsconfig.json b/packages/ipfs/tsconfig.json index 76a587cd96..b8eade7f51 100644 --- a/packages/ipfs/tsconfig.json +++ b/packages/ipfs/tsconfig.json @@ -1,11 +1,12 @@ { - "extends": "../../tsconfig.json", + "extends": "../../node_modules/aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "importsNotUsedAsValues": "preserve" }, "include": [ - "package.json", - "./src" + "src", + "package.json" ], "references": [ { diff --git a/tsconfig.json b/tsconfig.json deleted file mode 100644 index 2c7ec5306b..0000000000 --- a/tsconfig.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "compilerOptions": { - "allowJs": true, - "checkJs": true, - "forceConsistentCasingInFileNames": true, - "noImplicitReturns": false, - "noImplicitAny": false, - "noImplicitThis": true, - "noFallthroughCasesInSwitch": true, - "noUnusedLocals": true, - "noUnusedParameters": false, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "strictBindCallApply": true, - "strict": true, - "alwaysStrict": true, - "esModuleInterop": true, - "target": "ES2018", - "moduleResolution": "node", - "declaration": true, - "declarationMap": true, - "skipLibCheck": true, - "stripInternal": true, - "resolveJsonModule": true, - "incremental": true, - "isolatedModules": true, - "baseUrl": ".", - "paths": { - "interface-ipfs-core/*": [ - "interface-ipfs-core/*" - ], - "ipfs/*": [ - "ipfs/*" - ], - "ipfs-cli/*": [ - "ipfs-cli/*" - ], - "ipfs-client/*": [ - "ipfs-client/*" - ], - "ipfs-core/*": [ - "ipfs-core/*" - ], - "ipfs-core-utils/*": [ - "ipfs-core-utils/*" - ], - "ipfs-daemon/*": [ - "ipfs-daemon/*" - ], - "ipfs-grpc-client/*": [ - "ipfs-grpc-client/*" - ], - "ipfs-grpc-server/*": [ - "ipfs-grpc-server/*" - ], - "ipfs-http-client/*": [ - 
"ipfs-http-client/*" - ], - "ipfs-http-gateway/*": [ - "ipfs-http-gateway/*" - ], - "ipfs-http-server/*": [ - "ipfs-http-server/*" - ], - "ipfs-message-port-protocol/*": [ - "ipfs-message-port-protocol/*" - ], - "ipfs-message-port-server/*": [ - "ipfs-message-port-server/*" - ] - }, - "composite": true, - "emitDeclarationOnly": true - }, - "exclude": [ - "dist", - "packages/*/dist", - "packages/*/test", - "packages/*/node_modules", - "node_modules" - ], - "compileOnSave": false -}