From 5872e31d78019734cb15cb48a49e1ae493f75a07 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Mon, 13 Mar 2017 23:17:49 +0100 Subject: [PATCH] feat: migrate to datastore from pull-blob-store --- README.md | 172 +++++------- example.js | 18 ++ package.json | 53 ++-- src/blockstore.js | 139 ++++++++++ src/config.js | 52 ++++ src/default-options-browser.js | 11 + src/default-options.js | 8 + src/index.js | 243 ++++++++++++++--- src/lock-memory.js | 51 ++++ src/lock.js | 55 ++++ src/stores/blockstore.js | 190 -------------- src/stores/config.js | 52 ---- src/stores/index.js | 9 - src/stores/keys.js | 16 -- src/stores/locks.js | 63 ----- src/stores/version.js | 49 ---- src/version.js | 63 +++++ test/blockstore-test.js | 246 +++++++----------- test/browser.js | 56 +--- test/datastore-test.js | 91 +++++++ test/interop-test.js | 37 +++ test/node.js | 84 ++++-- test/repo-test.js | 70 +---- ...HSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data | 0 ...DDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data | 0 ...B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data | Bin ...7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data | 0 ...SIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data | 0 ...YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data | Bin ...LZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data | 0 ...774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data | Bin ...LJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data | 0 ...VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data | 0 ...WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data | Bin ...AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data | 0 ...TJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data | 0 ...33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data | 0 ...HTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data | Bin ...57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data | 0 ...JOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data | 0 ...LP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data | 0 test/test-repo/blocks/SHARDING | 1 + ...ANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data | Bin ...NH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data | Bin ...LOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data | 0 ...2BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data | 0 ...2FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data | Bin test/test-repo/blocks/_README | 30 +++ test/test-repo/version | 2 +- 49 files changed, 1048 insertions(+), 813 deletions(-) create mode 100644 example.js create mode 100644 src/blockstore.js create mode 100644 src/config.js create mode 100644 src/default-options-browser.js create mode 100644 src/default-options.js create mode 100644 src/lock-memory.js create mode 100644 src/lock.js delete mode 100644 src/stores/blockstore.js delete mode 100644 src/stores/config.js delete mode 100644 src/stores/index.js delete mode 100644 src/stores/keys.js delete mode 100644 src/stores/locks.js delete mode 100644 src/stores/version.js create mode 100644 src/version.js create mode 100644 test/datastore-test.js create mode 100644 test/interop-test.js rename test/test-repo/blocks/{CIQEU => 2F}/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data (100%) rename test/test-repo/blocks/{CIQFF => 5V}/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data (100%) rename test/test-repo/blocks/{CIQMB => 75}/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data (100%) rename test/test-repo/blocks/{CIQKK => 7J}/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data (100%) rename test/test-repo/blocks/{CIQON => AE}/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data (100%) rename test/test-repo/blocks/{CIQHA => AP}/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data (100%) rename test/test-repo/blocks/{CIQDD => 
C4}/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data (100%) rename test/test-repo/blocks/{CIQDM => CY}/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data (100%) rename test/test-repo/blocks/{CIQIX => FN}/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data (100%) rename test/test-repo/blocks/{CIQH7 => GQ}/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data (100%) rename test/test-repo/blocks/{CIQDD => HD}/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data (100%) rename test/test-repo/blocks/{CIQJF => IL}/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data (100%) rename test/test-repo/blocks/{CIQJB => LG}/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data (100%) rename test/test-repo/blocks/{CIQOY => O6}/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data (100%) rename test/test-repo/blocks/{CIQGP => QF}/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data (100%) rename test/test-repo/blocks/{CIQOH => QV}/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data (100%) rename test/test-repo/blocks/{CIQBE => R3}/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data (100%) rename test/test-repo/blocks/{CIQHB => S5}/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data (100%) create mode 100644 test/test-repo/blocks/SHARDING rename test/test-repo/blocks/{CIQFE => TW}/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data (100%) rename test/test-repo/blocks/{CIQOM => UN}/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data (100%) rename test/test-repo/blocks/{CIQGF => VO}/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data (100%) rename test/test-repo/blocks/{CIQFT => X3}/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data (100%) rename test/test-repo/blocks/{CIQJG => XO}/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data (100%) create mode 100644 test/test-repo/blocks/_README diff --git a/README.md b/README.md index 2f699516..ff7d5039 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,6 @@ This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs ## Table of Contents - [Background](#background) - - [Good to know (historical context)](#good-to-know-historical-context) - [Install](#install) - [npm](#npm) - [Use in Node.js](#use-in-nodejs) @@ -27,16 +26,6 @@ This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag) - [Usage](#usage) - [API](#api) - - [var repo = new IPFSRepo(path, opts)](#var-repo--new-ipfsrepopath-opts) - - [repo.exists(cb)](#repoexistscb) - - [repo.version.get(cb(err, version))](#repoversiongetcberr-version) - - [repo.version.set(version, cb(err))](#repoversionsetversion-cberr) - - [repo.config.get(cb(err, config))](#repoconfiggetcberr-config) - - [repo.config.set(config, cb(err))](#repoconfigsetconfig-cberr) - - [repo.keys](#repokeys) - - [repo.blockstore.putStream()](#) - - [repo.blockstore.getStream(key, extension)](#) - - [repo.datastore](#repodatastore) - [Contribute](#contribute) - [License](#license) @@ -45,62 +34,60 @@ This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs Here is the architectural reasoning for this repo: ```bash -┌─────────────────────────────────┐ -│ interface defined by Repo Spec │ -├─────────────────────────────────┤ -│ │ ┌──────────────────────┐ -│ │ │ interface-pull-blob-store │ -│ IPFS REPO │─────────────────────────────────▶│ interface │ -│ │ 
├──────────────────────┤ -│ │ │ locks │ -└─────────────────────────────────┘ └──────────────────────┘ - │ - ┌──────────┴────┬───────────────┬───────────────┬───────────────┬───────────────┐ - ▼ ▼ ▼ ▼ ▼ ▼ -┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐ -│ abstract │ │ abstract │ │ abstract │ │ abstract │ │ abstract │ │ abstract │ -│ -blob │ │ -blob │ │ -blob │ │ -blob │ │ -blob │ │ -blob │ -│ -store │ │ -store │ │ -store │ │ -store │ │ -store │ │ -store │ -│ interface │ │ interface │ │ interface │ │ interface │ │ interface │ │ interface │ -├───────────┤ ├───────────┤ ├───────────┤ ├───────────┤ ├───────────┤ ├───────────┤ -│ │ │ │ │ │ │ │ │ │ │ │ -│ keys │ │ config │ │ blockstore │ │ datastore │ │ logs │ │ version │ -│ │ │ │ │ │ │ │ │ │ │ │ -└───────────┘ └───────────┘ └───────────┘ └───────────┘ └───────────┘ └───────────┘ + ┌────────────────────────────────────────┐ + │ IPFSRepo │ + └────────────────────────────────────────┘ + ┌─────────────────┐ + │ / │ + ├─────────────────┤ + │ Datastore │ + └─────────────────┘ + │ + ┌───────────┴───────────┐ + │ │ + ┌─────────────────┐ ┌─────────────────┐ + │ /blocks │ │ /datastore │ + ├─────────────────┤ ├─────────────────┤ + │ Datastore │ │ LevelDatastore │ + └─────────────────┘ └─────────────────┘ + +┌────────────────────────────────────────┐ ┌────────────────────────────────────────┐ +│ IPFSRepo - Default Node.js │ │ IPFSRepo - Default Browser │ +└────────────────────────────────────────┘ └────────────────────────────────────────┘ + ┌─────────────────┐ ┌─────────────────┐ + │ / │ │ / │ + ├─────────────────┤ ├─────────────────┤ + │ FsDatastore │ │LevelJSDatastore │ + └─────────────────┘ └─────────────────┘ + │ │ + ┌───────────┴───────────┐ ┌───────────┴───────────┐ + │ │ │ │ +┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ +│ /blocks │ │ /datastore │ │ /blocks │ │ /datastore │ +├─────────────────┤ ├─────────────────┤ ├─────────────────┤ ├─────────────────┤ +│ FlatfsDatastore │ │LevelDBDatastore │ │LevelJSDatastore │ │LevelJSDatastore │ +└─────────────────┘ └─────────────────┘ └─────────────────┘ └─────────────────┘ ``` -This provides a well defined interface for creating and interacting with an IPFS -Repo backed by a group of abstract backends for keys, configuration, logs, and -more. Each of the individual repos has an interface defined by -[interface-pull-blob-store](https://github.com/ipfs/interface-pull-blob-store): this -enables us to make IPFS Repo portable (running on Node.js vs the browser) and -accept different types of storage mechanisms for each repo (fs, levelDB, etc). - -### Good to know (historical context) - -- The datastore folder holds the legacy version of datastore, still built in levelDB, there is a current endeavour of pushing it to fs completely. -- The blocks folder is the current version of datastore. -- The keys repo doesn't exist yet, as the private key is simply stored inside config +This provides a well defined interface for creating and interacting with an IPFS repo. ## Install ### npm ```sh -> npm i ipfs-repo +> npm install ipfs-repo ``` ### Use in Node.js -```JavaScript +```js var IPFSRepo = require('ipfs-repo') ``` ### Use in a browser with browserify, webpack or any other bundler -The code published to npm that gets loaded on require is in fact a ES5 transpiled version with the right shims added. This means that you can require it and use with your favourite bundler without having to adjust asset management process. 
-
-```JavaScript
+```js
 var IPFSRepo = require('ipfs-repo')
 ```
 
@@ -119,77 +106,42 @@ Loading this module through a script tag will make the `IpfsRepo` obj available
 
 Example:
 
 ```js
-var inMemoryBS = require('interface-pull-blob-store')
-// inMemoryBS is an "in memory" blob store, you can find others at:
-// https://github.com/ipfs/interface-pull-blob-store#modules-that-use-this
+const Repo = require('ipfs-repo')
+const repo = new Repo('/tmp/ipfs-repo')
 
-var IPFSRepo = require('ipfs-repo')
-var repo = new IPFSRepo('/Users/someone/.ipfs', {
-  stores: inMemoryBS
-})
-```
+repo.init({ cool: 'config' }, (err) => {
+  if (err) {
+    throw err
+  }
 
-## API
+  repo.open((err) => {
+    if (err) {
+      throw err
+    }
 
-```js
-var IPFSRepo = require('ipfs-repo')
+    console.log('repo is ready')
+  })
+})
 ```
 
-### var repo = new IPFSRepo(path, opts)
-
-Creates a **reference** to an IPFS repository at the path `path`. This does
-*not* create the repo, but is an object that refers to the repo at such a path.
-
-Valid keys for `opts` include:
+This creates the following structure, either on disk or as an in-memory representation:
 
-- `stores`: either an
-  [interface-pull-blob-store](https://github.com/ipfs/interface-pull-blob-store), or a
-  map of the form
-
-```js
-{
-  keys: someBlobStore,
-  config: someBlobStore,
-  datastore: someBlobStore,
-  logs: someBlobStore,
-  locks: someBlobStore,
-  version: someBlobStore
-}
+```
+├── blocks
+│   ├── SHARDING
+│   └── _README
+├── config
+├── datastore
+└── version
 ```
 
-If you use the former form, all of the sub-blob-stores will use the same store.
-
-### repo.exists(cb)
-
-Check if the repo you are going to access already exists. Calls the callback
-`cb(err, exists)`, where `exists` is true or false.
-
-### repo.version.get(cb(err, version))
-### repo.version.set(version, cb(err))
-
-Read/write the version number of the repository. The version number is the repo version number.
-
-### repo.config.get(cb(err, config))
-### repo.config.set(config, cb(err))
-
-Read/write the configuration object of the repository.
-
-### repo.keys
-
-Read/write keys inside the repo. This feature will be expanded once
-[IPRS](https://github.com/ipfs/specs/tree/master/records) and
-[KeyChain](https://github.com/ipfs/specs/tree/master/keychain) are finalized and implemented on go-ipfs.
-
-### repo.blockstore.putStream()
-### repo.datastore.getStream(key, extension)
-### repo.datastore.has(key, extension, cb)
-### repo.datastore.delete(key, extension, cb)
+## API
 
-Read and write buffers to/from the repo's block store.
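+
+As a minimal sketch of the new block API (assuming an open `repo` as in the
+example above, and borrowing `ipfs-block`, `cids` and `multihashing-async`
+from this PR's tests):
+
+```js
+// illustrative only: mirrors what the tests in this PR do
+const Block = require('ipfs-block')
+const CID = require('cids')
+const multihashing = require('multihashing-async')
+
+const data = new Buffer('hello world')
+
+// hash the data, wrap it in a Block addressed by its CID,
+// then round-trip it through the repo's blockstore
+multihashing(data, 'sha2-256', (err, hash) => {
+  if (err) { throw err }
+  const block = new Block(data, new CID(hash))
+
+  repo.blockstore.put(block, (err) => {
+    if (err) { throw err }
+
+    repo.blockstore.get(block.cid, (err, b) => {
+      if (err) { throw err }
+      console.log(b.data.toString()) // => 'hello world'
+    })
+  })
+})
+```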
+See https://ipfs.github.io/js-ipfs-repo -### repo.datastore +## Notes -**WIP** +- [Explanation of how repo is structured](https://github.com/ipfs/js-ipfs-repo/pull/111#issuecomment-279948247) ## Contribute diff --git a/example.js b/example.js new file mode 100644 index 00000000..73f56bc6 --- /dev/null +++ b/example.js @@ -0,0 +1,18 @@ +'use strict' + +const Repo = require('ipfs-repo') +const repo = new Repo('/Users/awesome/.jsipfs') + +repo.init({my: 'config'}, (err) => { + if (err) { + throw err + } + + repo.open((err) => { + if (err) { + throw err + } + + console.log('repo is ready') + }) +}) diff --git a/package.json b/package.json index e1970bf3..350dbdf8 100644 --- a/package.json +++ b/package.json @@ -3,24 +3,31 @@ "version": "0.11.3", "description": "IPFS Repo implementation", "main": "src/index.js", + "browser": { + "./src/lock.js": "./src/lock-memory.js", + "./src/default-options.js": "./src/default-options-browser.js" + }, "scripts": { "test": "aegir-test", "test:node": "aegir-test node", "test:browser": "aegir-test browser", "build": "aegir-build", "lint": "aegir-lint", - "release": "aegir-release", - "release-minor": "aegir-release --type minor", - "release-major": "aegir-release --type major", + "release": "aegir-release --docs", + "release-minor": "aegir-release --type minor --docs", + "release-major": "aegir-release --type major --docs", "coverage": "aegir-coverage", - "coverage-publish": "aegir-coverage publish" + "coverage-publish": "aegir-coverage publish", + "docs": "aegir-docs" }, "repository": { "type": "git", "url": "https://github.com/ipfs/js-ipfs-repo.git" }, "keywords": [ - "IPFS" + "IPFS", + "libp2p", + "datastore" ], "pre-commit": [ "lint", @@ -28,30 +35,34 @@ ], "homepage": "https://github.com/ipfs/js-ipfs-repo", "engines": { - "node": ">=4.0.0" + "node": ">=4.0.0", + "npm": ">=3.0.0" }, "devDependencies": { - "aegir": "^10.0.0", - "buffer-loader": "^0.0.1", + "aegir": "^11.0.0", "chai": "^3.5.0", - "fs-pull-blob-store": "~0.4.1", - "idb-pull-blob-store": "~0.5.1", - "interface-pull-blob-store": "~0.6.0", + "dirty-chai": "^1.2.2", "lodash": "^4.17.4", - "multihashes": "~0.4.0", + "memdown": "^1.2.4", + "multihashes": "~0.4.4", "ncp": "^2.0.0", "pre-commit": "^1.2.2", - "rimraf": "^2.5.4" + "rimraf": "^2.6.1" }, "dependencies": { - "async": "^2.1.4", + "async": "^2.1.5", "base32.js": "^0.1.0", - "ipfs-block": "~0.5.5", - "lock": "^0.1.3", - "multihashes": "~0.3.3", - "pull-defer": "^0.2.2", - "pull-stream": "^3.5.0", - "pull-write": "^1.1.1", + "cids": "^0.4.2", + "datastore-core": "^0.1.0", + "datastore-fs": "^0.1.1", + "datastore-level": "^0.1.0", + "debug": "^2.6.3", + "interface-datastore": "^0.1.1", + "ipfs-block": "~0.6.0", + "level-js": "^2.2.4", + "leveldown": "^1.6.0", + "lock-me": "^1.0.2", + "multiaddr": "^2.2.2", "safe-buffer": "^5.0.1" }, "license": "MIT", @@ -69,4 +80,4 @@ "nginnever ", "npmcdn-to-unpkg-bot " ] -} \ No newline at end of file +} diff --git a/src/blockstore.js b/src/blockstore.js new file mode 100644 index 00000000..eb660411 --- /dev/null +++ b/src/blockstore.js @@ -0,0 +1,139 @@ +'use strict' + +const NamespaceStore = require('datastore-core').NamespaceDatastore +const Key = require('interface-datastore').Key +const base32 = require('base32.js') +const Block = require('ipfs-block') +const setImmediate = require('async/setImmediate') +const reject = require('async/reject') +const CID = require('cids') + +const blockPrefix = new Key('blocks') + +/** + * Transform a raw buffer to a base32 encoded key. 
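+ * (For illustration: the buffer is base32-encoded, so a block key looks
+ * like `/CIQFT...`, matching the block filenames under `test/test-repo/blocks/`.)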
+ *
+ * @param {Buffer} rawKey
+ * @returns {Key}
+ */
+const keyFromBuffer = (rawKey) => {
+  const enc = new base32.Encoder()
+  return new Key('/' + enc.write(rawKey).finalize(), false)
+}
+
+/**
+ * Transform a cid to the appropriate datastore key.
+ *
+ * @param {CID} cid
+ * @returns {Key}
+ */
+const cidToDsKey = (cid) => {
+  return keyFromBuffer(cid.buffer)
+}
+
+module.exports = (repo) => {
+  const store = new NamespaceStore(repo.store, blockPrefix)
+  return {
+    /**
+     * Get a single block by CID.
+     *
+     * @param {CID} cid
+     * @param {function(Error, Block)} callback
+     * @returns {void}
+     */
+    get (cid, callback) {
+      if (!CID.isCID(cid)) {
+        return setImmediate(() => {
+          callback(new Error('Not a valid cid'))
+        })
+      }
+
+      const k = cidToDsKey(cid)
+      store.get(k, (err, blockData) => {
+        if (err) {
+          return callback(err)
+        }
+
+        callback(null, new Block(blockData, cid))
+      })
+    },
+    /**
+     * Write a single block to the store.
+     *
+     * @param {Block} block
+     * @param {function(Error)} callback
+     * @returns {void}
+     */
+    put (block, callback) {
+      if (!Block.isBlock(block)) {
+        return setImmediate(() => {
+          callback(new Error('invalid block'))
+        })
+      }
+
+      const k = cidToDsKey(block.cid)
+
+      store.has(k, (err, exists) => {
+        if (err) {
+          return callback(err)
+        }
+        if (exists) {
+          return callback()
+        }
+
+        store.put(k, block.data, callback)
+      })
+    },
+    /**
+     * Like `put`, but for multiple blocks.
+     *
+     * @param {Array} blocks
+     * @param {function(Error)} callback
+     * @returns {void}
+     */
+    putMany (blocks, callback) {
+      const keys = blocks.map((b) => ({
+        key: cidToDsKey(b.cid),
+        block: b
+      }))
+
+      const batch = store.batch()
+      reject(keys, (k, cb) => store.has(k.key, cb), (err, newKeys) => {
+        if (err) {
+          return callback(err)
+        }
+
+        newKeys.forEach((k) => {
+          batch.put(k.key, k.block.data)
+        })
+
+        batch.commit(callback)
+      })
+    },
+    /**
+     * Does the store contain a block with this cid?
+     *
+     * @param {CID} cid
+     * @param {function(Error, bool)} callback
+     * @returns {void}
+     */
+    has (cid, callback) {
+      if (!CID.isCID(cid)) {
+        return setImmediate(() => {
+          callback(new Error('Not a valid cid'))
+        })
+      }
+
+      store.has(cidToDsKey(cid), callback)
+    },
+    /**
+     * Delete a block from the store.
+     *
+     * @param {CID} cid
+     * @param {function(Error)} callback
+     * @returns {void}
+     */
+    delete (cid, callback) {
+      if (!CID.isCID(cid)) {
+        return setImmediate(() => {
+          callback(new Error('Not a valid cid'))
+        })
+      }
+
+      store.delete(cidToDsKey(cid), callback)
+    }
+  }
+}
diff --git a/src/config.js
new file mode 100644
index 00000000..a92f7f65
--- /dev/null
+++ b/src/config.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const Key = require('interface-datastore').Key
+
+const configKey = new Key('config')
+
+module.exports = (store) => {
+  return {
+    /**
+     * Get the current configuration from the repo.
+     *
+     * @param {function(Error, Object)} callback
+     * @returns {void}
+     */
+    get (callback) {
+      store.get(configKey, (err, value) => {
+        if (err) {
+          return callback(err)
+        }
+
+        let config
+        try {
+          config = JSON.parse(value.toString())
+        } catch (err) {
+          return callback(err)
+        }
+        callback(null, config)
+      })
+    },
+    /**
+     * Set the current configuration for this repo.
+     *
+     * @param {Object} config - the config object to be written
+     * @param {function(Error)} callback
+     * @returns {void}
+     */
+    set (config, callback) {
+      const buf = new Buffer(JSON.stringify(config, null, 2))
+
+      store.put(configKey, buf, callback)
+    },
+    /**
+     * Check if a config file exists.
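+     * (Used by `_isInitialized` in `src/index.js` to decide whether `open` may proceed.)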
+     *
+     * @param {function(Error, bool)} callback
+     * @returns {void}
+     */
+    exists (callback) {
+      store.has(configKey, callback)
+    }
+  }
+}
diff --git a/src/default-options-browser.js
new file mode 100644
index 00000000..9d1d9aea
--- /dev/null
+++ b/src/default-options-browser.js
@@ -0,0 +1,11 @@
+'use strict'
+
+// Default configuration for a repo in the browser
+module.exports = {
+  fs: require('datastore-level'),
+  sharding: false,
+  fsOptions: {
+    db: require('level-js')
+  },
+  level: require('level-js')
+}
diff --git a/src/default-options.js
new file mode 100644
index 00000000..5828e8ce
--- /dev/null
+++ b/src/default-options.js
@@ -0,0 +1,8 @@
+'use strict'
+
+// Default configuration for a repo in node.js
+module.exports = {
+  fs: require('datastore-fs'),
+  level: require('leveldown'),
+  sharding: true
+}
diff --git a/src/index.js
index 3a110cdd..334a58f3 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,56 +1,239 @@
 'use strict'
 
+const core = require('datastore-core')
+const MountStore = core.MountDatastore
+const ShardingStore = core.ShardingDatastore
+
+const Key = require('interface-datastore').Key
+const LevelStore = require('datastore-level')
+const waterfall = require('async/waterfall')
+const series = require('async/series')
+const parallel = require('async/parallel')
+const Multiaddr = require('multiaddr')
+const Buffer = require('safe-buffer').Buffer
 const assert = require('assert')
+const path = require('path')
+const debug = require('debug')
+
+const version = require('./version')
+const config = require('./config')
+const blockstore = require('./blockstore')
 
-const stores = require('./stores')
+const log = debug('repo')
 
-module.exports = class Repo {
+const apiFile = new Key('api')
+const flatfsDirectory = 'blocks'
+const levelDirectory = 'datastore'
+const repoVersion = 5
+
+/**
+ * IpfsRepo implements all required functionality to read and write to an IPFS repo.
+ */
+class IpfsRepo {
+  /**
+   * @param {string} repoPath - path where the repo is stored
+   * @param {object} options - Configuration
+   * @param {Datastore} options.fs
+   * @param {Leveldown} options.level
+   * @param {object} [options.fsOptions={}]
+   * @param {bool} [options.sharding=true] - Enable sharding (flatfs on disk), not needed in the browser.
+   * @param {string} [options.lock='fs'] - Either `fs` or `memory`.
+   */
   constructor (repoPath, options) {
     assert.equal(typeof repoPath, 'string', 'missing repoPath')
-    assert(options, 'missing options')
-    assert(options.stores, 'missing options.stores')
+    if (options == null) {
+      options = require('./default-options')
+    }
 
+    this.closed = true
     this.path = repoPath
+    this.options = Object.assign({
+      sharding: true,
+      lock: 'fs'
+    }, options)
+    this._fsOptions = Object.assign({}, options.fsOptions)
+
+    const FsStore = this.options.fs
+    this._fsStore = new FsStore(this.path, Object.assign({}, this._fsOptions, {
+      extension: ''
+    }))
+
+    this.version = version(this._fsStore)
+    this.config = config(this._fsStore)
+
+    if (this.options.lock === 'memory') {
+      this._locker = require('./lock-memory')
+    } else if (this.options.lock === 'fs') {
+      this._locker = require('./lock')
+    } else {
+      throw new Error('Unknown lock option: ' + this.options.lock)
+    }
+  }
+
+  /**
+   * Initialize a new repo.
+   *
+   * @param {Object} config - config to write into `config`.
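+   *   (written as pretty-printed JSON under the `config` key; `init` also
+   *   stores the repo version, which is 5 at the time of this change)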
+   * @param {function(Error)} callback
+   * @returns {void}
+   */
+  init (config, callback) {
+    log('initializing at: %s', this.path)
+    series([
+      (cb) => this.config.set(config, cb),
+      (cb) => this.version.set(repoVersion, cb)
+    ], callback)
+  }
+
+  /**
+   * Open the repo. If the repo is already open an error is returned.
+   * If the repo is not initialized it will return an error.
+   *
+   * @param {function(Error)} callback
+   * @returns {void}
+   */
+  open (callback) {
+    if (!this.closed) {
+      return callback(new Error('repo is already open'))
+    }
+    log('opening at: %s', this.path)
+
+    // check if the repo is already initialized
+    waterfall([
+      (cb) => this._isInitialized(cb),
+      (cb) => this._locker.lock(this.path, cb),
+      (lck, cb) => {
+        log('acquired repo.lock')
+        this.lockfile = lck
 
-    const blobStores = initializeBlobStores(options.stores)
+        log('creating flatfs')
+        const FsStore = this.options.fs
+        const s = new FsStore(path.join(this.path, flatfsDirectory), this._fsOptions)
 
-    const setup = (name, needs) => {
-      needs = needs || {}
-      const args = [repoPath, blobStores[name]]
-      if (needs.locks) {
-        args.push(this.locks)
+        if (this.options.sharding) {
+          const shard = new core.shard.NextToLast(2)
+          ShardingStore.createOrOpen(s, shard, cb)
+        } else {
+          cb(null, s)
+        }
+      },
+      (flatfs, cb) => {
+        log('Flatfs store opened')
+        this.store = new MountStore([{
+          prefix: new Key(flatfsDirectory),
+          datastore: flatfs
+        }, {
+          prefix: new Key('/'),
+          datastore: new LevelStore(path.join(this.path, levelDirectory), {
+            db: this.options.level
+          })
+        }])
+
+        this.blockstore = blockstore(this)
+        this.closed = false
+        cb()
+      }
+    ], (err) => {
+      if (err && this.lockfile) {
+        return this.lockfile.close((err2) => {
+          log('error removing lock', err2)
+          callback(err)
+        })
       }
-      if (needs.config) {
-        args.push(this.config)
+      callback(err)
+    })
+  }
+
+  /**
+   * Check if the repo is already initialized.
+   *
+   * @private
+   * @param {function(Error)} callback
+   * @returns {void}
+   */
+  _isInitialized (callback) {
+    parallel([
+      (cb) => this.config.exists(cb),
+      (cb) => this.version.check(repoVersion, cb)
+    ], (err, res) => {
+      if (err) {
+        return callback(err)
      }
-      return stores[name].setUp.apply(stores[name], args)
+      if (!res[0]) {
+        return callback(new Error('repo is not initialized yet'))
+      }
+      callback()
+    })
+  }
+
+  /**
+   * Close the repo and cleanup.
+   *
+   * @param {function(Error)} callback
+   * @returns {void}
+   */
+  close (callback) {
+    if (this.closed) {
+      return callback(new Error('repo is already closed'))
    }
-    this.locks = setup('locks')
-    this.version = setup('version', {locks: true})
-    this.config = setup('config', {locks: true})
-    this.keys = setup('keys', {locks: true, config: true})
-    this.blockstore = setup('blockstore', {locks: true})
+    log('closing at: %s', this.path)
+    series([
+      (cb) => this._fsStore.delete(apiFile, (err) => {
+        if (err && err.message.startsWith('ENOENT')) {
+          return cb()
+        }
+        cb(err)
+      }),
+      (cb) => this.store.close(cb),
+      (cb) => this._fsStore.close(cb),
+      (cb) => {
+        this.closed = true
+        this.lockfile.close(cb)
+      }
+    ], callback)
   }
 
+  /**
+   * Check if a repo exists.
+   *
+   * @param {function(Error, bool)} callback
+   * @returns {void}
+   */
   exists (callback) {
     this.version.exists(callback)
   }
 
-function initializeBlobStores (store) {
-  if (store.constructor) {
-    return {
-      keys: store,
-      config: store,
-      blockstore: store,
-      logs: store,
-      locks: store,
-      version: store
-    }
+  /**
+   * Set the API address by writing it to the `/api` file.
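+   * (For example a multiaddr such as `/ip4/127.0.0.1/tcp/5002`; the address
+   * is stored as its string form via `addr.toString()`.)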
+   *
+   * @param {Multiaddr} addr
+   * @param {function(Error)} callback
+   * @returns {void}
+   */
+  setApiAddress (addr, callback) {
+    this._fsStore.put(apiFile, Buffer.from(addr.toString()), callback)
   }
 
-  return store
+  /**
+   * Returns the registered API address, according to the `/api` file in this repo.
+   *
+   * @param {function(Error, Multiaddr)} callback
+   * @returns {void}
+   */
+  apiAddress (callback) {
+    this._fsStore.get(apiFile, (err, rawAddr) => {
+      if (err) {
+        return callback(err)
+      }
+
+      callback(null, new Multiaddr(rawAddr.toString()))
+    })
+  }
 }
+
+module.exports = IpfsRepo
diff --git a/src/lock-memory.js
new file mode 100644
index 00000000..9b6c2937
--- /dev/null
+++ b/src/lock-memory.js
@@ -0,0 +1,51 @@
+'use strict'
+
+const debug = require('debug')
+const setImmediate = require('async/setImmediate')
+
+const log = debug('repo:lock')
+
+const lockFile = 'repo.lock'
+
+const LOCKS = {}
+
+/**
+ * Lock the repo in the given dir.
+ *
+ * @param {string} dir
+ * @param {function(Error, lock)} callback
+ * @returns {void}
+ */
+exports.lock = (dir, callback) => {
+  const file = dir + '/' + lockFile
+  log('locking %s', file)
+  LOCKS[file] = true
+  const closer = {
+    close (cb) {
+      if (LOCKS[file]) {
+        delete LOCKS[file]
+      }
+      setImmediate(cb)
+    }
+  }
+  setImmediate(() => {
+    callback(null, closer)
+  })
+}
+
+/**
+ * Check if the repo in the given directory is locked.
+ *
+ * @param {string} dir
+ * @param {function(Error, bool)} callback
+ * @returns {void}
+ */
+exports.locked = (dir, callback) => {
+  const file = dir + '/' + lockFile
+  log('checking lock: %s', file)
+
+  const locked = LOCKS[file]
+  setImmediate(() => {
+    callback(null, locked)
+  })
+}
diff --git a/src/lock.js
new file mode 100644
index 00000000..dc6513fa
--- /dev/null
+++ b/src/lock.js
@@ -0,0 +1,55 @@
+'use strict'
+
+const Lock = require('lock-me')
+const path = require('path')
+const debug = require('debug')
+const fs = require('fs')
+
+const log = debug('repo:lock')
+
+const lockFile = 'repo.lock'
+const lock = new Lock()
+
+/**
+ * Lock the repo in the given dir.
+ *
+ * @param {string} dir
+ * @param {function(Error, lock)} callback
+ * @returns {void}
+ */
+exports.lock = (dir, callback) => {
+  const file = path.join(dir, lockFile)
+  log('locking %s', file)
+  lock(file, callback)
+}
+
+/**
+ * Check if the repo in the given directory is locked.
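+ * (Note: this works by actually taking the lock and releasing it again;
+ * a failed `lock()` call is reported as locked.)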
+ *
+ * @param {string} dir
+ * @param {function(Error, bool)} callback
+ * @returns {void}
+ */
+exports.locked = (dir, callback) => {
+  const file = path.join(dir, lockFile)
+  log('checking lock: %s', file)
+
+  if (!fs.existsSync(file)) {
+    log('file does not exist: %s', file)
+  }
+
+  lock(file, (err, lck) => {
+    if (err) {
+      log('already locked: %s', err.message)
+      return callback(null, true)
+    }
+
+    log('no one has a lock')
+    lck.close((err) => {
+      if (err) {
+        return callback(err)
+      }
+      callback(null, false)
+    })
+  })
+}
diff --git a/src/stores/blockstore.js
deleted file mode 100644
index f321b542..00000000
--- a/src/stores/blockstore.js
+++ /dev/null
@@ -1,190 +0,0 @@
-'use strict'
-
-const Block = require('ipfs-block')
-const Lock = require('lock')
-const base32 = require('base32.js')
-const path = require('path')
-const pull = require('pull-stream')
-const pullWrite = require('pull-write')
-const pullDefer = require('pull-defer/source')
-const parallel = require('async/parallel')
-
-const PREFIX_LENGTH = 5
-const EXTENSION = 'data'
-
-exports = module.exports
-
-function multihashToPath (multihash) {
-  const encoder = new base32.Encoder()
-  const hash = encoder.write(multihash).finalize()
-  const filename = `${hash}.${EXTENSION}`
-  const folder = filename.slice(0, PREFIX_LENGTH)
-
-  return path.join(folder, filename)
-}
-
-exports.setUp = (basePath, BlobStore, locks) => {
-  const store = new BlobStore(basePath + '/blocks')
-  const lock = new Lock()
-
-  // blockBlob is an object with:
-  // { data: <>, key: <> }
-  function writeBlock (blockBlob, callback) {
-    if (!blockBlob || !blockBlob.data) {
-      return callback(new Error('Invalid block'))
-    }
-
-    const key = multihashToPath(blockBlob.key)
-
-    lock(key, (release) => {
-      pull(
-        pull.values([
-          blockBlob.data
-        ]),
-        store.write(key, release(released))
-      )
-    })
-
-    // called once the lock is released
-    function released (err) {
-      if (err) {
-        return callback(err)
-      }
-      callback(null, { key: key })
-    }
-  }
-
-  return {
-    // returns a pull-stream of one block being read
-    getStream (key) {
-      if (!key) {
-        return pull.error(new Error('Invalid key'))
-      }
-
-      const blockPath = multihashToPath(key)
-      const deferred = pullDefer()
-
-      lock(blockPath, (release) => {
-        pull(
-          store.read(blockPath),
-          pull.collect(release(released))
-        )
-      })
-
-      function released (err, data) {
-        if (err) {
-          return deferred.abort(err)
-        }
-
-        deferred.resolve(
-          pull.values([
-            new Block(Buffer.concat(data))
-          ])
-        )
-      }
-
-      return deferred
-    },
-
-    /*
-     * putStream - write multiple blocks
-     *
-     * returns a pull-stream that expects blockBlobs
-     *
-     * NOTE: blockBlob is a { data: <>, key: <> } and not a
-     * ipfs-block instance.
This is because Block instances support - * several types of hashing and it is up to the BlockService - * to understand the right one to use (given the CID) - */ - // TODO - // consider using a more explicit name, this can cause some confusion - // since the natural association is - // getStream - createReadStream - read one - // putStream - createWriteStream - write one - // where in fact it is: - // getStream - createReadStream - read one (the same) - // putStream - createFilesWriteStream = write several - // - putStream () { - let ended = false - let written = [] - let push = null - - const sink = pullWrite((blockBlobs, cb) => { - const tasks = writeTasks(blockBlobs) - parallel(tasks, cb) - }, null, 100, (err) => { - ended = err || true - if (push) { - push(ended) - } - }) - - const source = (end, cb) => { - if (end) { - ended = end - return cb(end) - } - - if (written.length) { - return cb(null, written.shift()) - } - - if (ended) { - return cb(ended) - } - - push = cb - } - - /* - * Creates individual tasks to write each block blob that can be - * exectured in parallel - */ - function writeTasks (blockBlobs) { - return blockBlobs.map((blockBlob) => { - return (cb) => { - writeBlock(blockBlob, (err, meta) => { - if (err) { - return cb(err) - } - - if (push) { - const read = push - push = null - read(null, meta) - return cb() - } - - written.push(meta) - cb() - }) - } - }) - } - - return { - source: source, - sink: sink - } - }, - - has (key, callback) { - if (!key) { - return callback(new Error('Invalid key')) - } - - const blockPath = multihashToPath(key) - store.exists(blockPath, callback) - }, - - delete (key, callback) { - if (!key) { - return callback(new Error('Invalid key')) - } - - const blockPath = multihashToPath(key) - store.remove(blockPath, callback) - } - } -} diff --git a/src/stores/config.js b/src/stores/config.js deleted file mode 100644 index c178e34a..00000000 --- a/src/stores/config.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict' - -const pull = require('pull-stream') -const series = require('async/series') - -exports = module.exports - -exports.setUp = (basePath, BlobStore, locks) => { - const store = new BlobStore(basePath) - const configFile = 'config' - - return { - get (callback) { - pull( - store.read(configFile), - pull.collect((err, values) => { - if (err) { - return callback(err) - } - - const config = Buffer.concat(values) - let result - try { - result = JSON.parse(config.toString()) - } catch (err) { - return callback(err) - } - - callback(null, result) - }) - ) - }, - - set (config, callback) { - series([ - (cb) => locks.lock(cb), - (cb) => { - pull( - pull.values([ - new Buffer(JSON.stringify(config, null, 2)) - ]), - store.write(configFile, cb) - ) - } - ], (err) => { - locks.unlock((err2) => { - callback(err || err2) - }) - }) - } - } -} diff --git a/src/stores/index.js b/src/stores/index.js deleted file mode 100644 index 01476e33..00000000 --- a/src/stores/index.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -exports = module.exports - -exports.locks = require('./locks') -exports.version = require('./version') -exports.config = require('./config') -exports.keys = require('./keys') -exports.blockstore = require('./blockstore') diff --git a/src/stores/keys.js b/src/stores/keys.js deleted file mode 100644 index 60831436..00000000 --- a/src/stores/keys.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -exports = module.exports - -exports.setUp = (basePath, BlobStore, locks, config) => { - return { - get (callback) { - config.get((err, config) => { - if 
(err) { - return callback(err) - } - callback(null, config.Identity.PrivKey) - }) - } - } -} diff --git a/src/stores/locks.js b/src/stores/locks.js deleted file mode 100644 index 34353ec0..00000000 --- a/src/stores/locks.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict' - -const pull = require('pull-stream') -const series = require('async/series') - -exports = module.exports - -exports.setUp = (basePath, BlobStore) => { - const store = new BlobStore(basePath) - const lockFile = 'repo.lock' - - return { - lock (callback) { - function createLock () { - pull( - pull.values([ - new Buffer('LOCK') - ]), - store.write(lockFile, callback) - ) - } - - function doesExist (err, exists) { - if (err) { - return callback(err) - } - - if (exists) { - // default 100ms - setTimeout(function () { - store.exists(lockFile, doesExist) - }, 100) - return - } - - createLock() - } - - store.exists(lockFile, doesExist) - }, - - unlock (callback) { - series([ - (cb) => { - store.remove(lockFile, cb) - }, - (cb) => { - store.exists(lockFile, (err, exists) => { - if (err) { - return cb(err) - } - - if (exists) { - return cb(new Error('failed to remove lock')) - } - - cb() - }) - } - ], callback) - } - } -} diff --git a/src/stores/version.js b/src/stores/version.js deleted file mode 100644 index a5f9428d..00000000 --- a/src/stores/version.js +++ /dev/null @@ -1,49 +0,0 @@ -'use strict' - -const pull = require('pull-stream') -const series = require('async/series') -const toBuffer = require('safe-buffer').Buffer.from - -exports = module.exports - -exports.setUp = (basePath, BlobStore, locks) => { - const store = new BlobStore(basePath) - const versionFile = 'version' - - return { - exists (callback) { - store.exists(versionFile, callback) - }, - get (callback) { - pull( - store.read(versionFile), - pull.collect((err, values) => { - if (err) { - return callback(err) - } - - const version = Buffer.concat(values).toString('utf8') - callback(null, version) - }) - ) - }, - set (value, callback) { - series([ - (cb) => locks.lock(cb), - (cb) => { - const values = [ - Buffer.isBuffer(value) ? value : toBuffer(value) - ] - pull( - pull.values(values), - store.write(versionFile, cb) - ) - } - ], (err) => { - locks.unlock((err2) => { - callback(err || err2) - }) - }) - } - } -} diff --git a/src/version.js b/src/version.js new file mode 100644 index 00000000..ab152cfe --- /dev/null +++ b/src/version.js @@ -0,0 +1,63 @@ +'use strict' + +const Key = require('interface-datastore').Key +const debug = require('debug') +const log = debug('repo:version') + +const versionKey = new Key('version') + +module.exports = (store) => { + return { + /** + * Check if a version file exists. + * + * @param {function(Error, bool)} callback + * @returns {void} + */ + exists (callback) { + store.has(versionKey, callback) + }, + /** + * Get the current version. + * + * @param {function(Error, number)} callback + * @returns {void} + */ + get (callback) { + store.get(versionKey, (err, buf) => { + if (err) { + return callback(err) + } + callback(null, parseInt(buf.toString().trim(), 10)) + }) + }, + /** + * Set the version of the repo, writing it to the underlying store. 
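+     * (The version is stored as a utf8 string, e.g. `'5'`, and parsed back
+     * with `parseInt` in `get`.)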
+     *
+     * @param {number} version
+     * @param {function(Error)} callback
+     * @returns {void}
+     */
+    set (version, callback) {
+      store.put(versionKey, new Buffer(String(version)), callback)
+    },
+    /**
+     * Check the current version, and return an error on mismatch.
+     *
+     * @param {number} expected
+     * @param {function(Error)} callback
+     * @returns {void}
+     */
+    check (expected, callback) {
+      this.get((err, version) => {
+        if (err) {
+          return callback(err)
+        }
+        log('comparing version: %s and %s', version, expected)
+        if (version !== expected) {
+          return callback(new Error(`version mismatch: expected v${expected}, found v${version}`))
+        }
+        callback()
+      })
+    }
+  }
+}
diff --git a/test/blockstore-test.js
index 075def0c..4449bed4 100644
--- a/test/blockstore-test.js
+++ b/test/blockstore-test.js
@@ -2,178 +2,141 @@
 /* eslint-env mocha */
 'use strict'
 
-const expect = require('chai').expect
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 const Block = require('ipfs-block')
-const mh = require('multihashes')
-const pull = require('pull-stream')
+const CID = require('cids')
 const parallel = require('async/parallel')
 const waterfall = require('async/waterfall')
+const each = require('async/each')
+const map = require('async/map')
 const _ = require('lodash')
+const multihashing = require('multihashing-async')
 
 module.exports = (repo) => {
   describe('blockstore', () => {
-    const helloKey = 'CIQLS/CIQLSTJHXGJU2PQIUUXFFV62PWV7VREE57RXUU4A52IIR55M4LX432I.data'
-    const blockCollection = _.range(100).map((i) => new Block(new Buffer(`hello-${i}-${Math.random()}`)))
-    const b = new Block(new Buffer('hello world'))
-    let bKey
+    const blockData = _.range(100).map((i) => new Buffer(`hello-${i}-${Math.random()}`))
+    const bData = new Buffer('hello world')
+    let b
 
     before((done) => {
-      b.key((err, key) => {
+      multihashing(bData, 'sha2-256', (err, h) => {
         if (err) {
           return done(err)
         }
-        bKey = key
+
+        b = new Block(bData, new CID(h))
         done()
       })
     })
 
-    describe('.putStream', () => {
+    describe('.put', () => {
       it('simple', (done) => {
-        pull(
-          pull.values([
-            { data: b.data, key: bKey }
-          ]),
-          repo.blockstore.putStream(),
-          pull.collect((err, meta) => {
-            expect(err).to.not.exist
-            expect(meta[0].key).to.be.eql(helloKey)
-            done()
-          })
-        )
+        repo.blockstore.put(b, done)
       })
 
       it('multi write (locks)', (done) => {
-        let i = 0
-        const finish = (err, meta) => {
-          expect(err).to.not.exist
-          expect(meta[0].key).to.equal(helloKey)
-
-          i++
-          if (i === 2) done()
-        }
-
-        pull(
-          pull.values([
-            { data: b.data, key: bKey }
-          ]),
-          repo.blockstore.putStream(),
-          pull.collect(finish)
-        )
-
-        pull(
-          pull.values([
-            { data: b.data, key: bKey }
-          ]),
-          repo.blockstore.putStream(),
-          pull.collect(finish)
-        )
+        parallel([
+          (cb) => repo.blockstore.put(b, cb),
+          (cb) => repo.blockstore.put(b, cb)
+        ], done)
      })
 
       it('massive multiwrite', (done) => {
-        parallel(_.range(50).map(() => (cb) => {
-          pull(
-            pull.values(blockCollection),
-            pull.asyncMap((b, cb) => {
-              b.key((err, key) => {
-                if (err) {
-                  return cb(err)
-                }
-                cb(null, {data: b.data, key: key})
+        waterfall([
+          (cb) => map(_.range(100), (i, cb) => {
+            multihashing(blockData[i], 'sha2-256', cb)
+          }, cb),
+          (hashes, cb) => each(_.range(100), (i, cb) => {
+            const block = new Block(blockData[i], new CID(hashes[i]))
+            repo.blockstore.put(block, cb)
+          }, cb)
+        ], done)
       })
+
+      it('.putMany', (done) => {
+        waterfall([
+          (cb) => map(_.range(50), (i, cb) => {
+            const d = new Buffer('many' + Math.random())
+            multihashing(d,
'sha2-256', (err, hash) => { + if (err) { + return cb(err) + } + cb(null, new Block(d, new CID(hash))) + }) + }, cb), + (blocks, cb) => { + repo.blockstore.putMany(blocks, (err) => { + expect(err).to.not.exist() + map(blocks, (b, cb) => { + repo.blockstore.get(b.cid, cb) + }, (err, res) => { + expect(err).to.not.exist() + expect(res).to.be.eql(blocks) + cb() }) - }), - repo.blockstore.putStream(), - pull.collect((err, meta) => { - expect(err).to.not.exist - expect(meta).to.have.length(100) - cb() }) - ) - }), done) + } + ], done) }) it('returns an error on invalid block', (done) => { - pull( - pull.values(['hello']), - repo.blockstore.putStream(), - pull.onEnd((err) => { - expect(err.message).to.be.eql('Invalid block') - done() - }) - ) + repo.blockstore.put('hello', (err) => { + expect(err).to.exist() + done() + }) }) }) - describe('.getStream', () => { + describe('.get', () => { it('simple', (done) => { - pull( - repo.blockstore.getStream(bKey), - pull.collect((err, data) => { - expect(err).to.not.exist - data[0].key((err, key) => { - expect(err).to.not.exist - expect(key).to.be.eql(bKey) - done() - }) - }) - ) + repo.blockstore.get(b.cid, (err, block) => { + expect(err).to.not.exist() + expect(block).to.be.eql(b) + done() + }) }) it('massive read', (done) => { parallel(_.range(20 * 100).map((i) => (cb) => { - const j = i % blockCollection.length - pull( - pull.values([blockCollection[j]]), - pull.asyncMap((b, cb) => b.key(cb)), - pull.map((key) => repo.blockstore.getStream(key)), - pull.flatten(), - pull.collect((err, meta) => { - expect(err).to.not.exist - parallel([ - (cb) => meta[0].key(cb), - (cb) => blockCollection[j].key(cb) - ], (err, res) => { - expect(err).to.not.exist - expect(res[0]).to.be.eql(res[1]) - cb() - }) - }) - ) + const j = i % blockData.length + waterfall([ + (cb) => multihashing(blockData[j], 'sha2-256', cb), + (h, cb) => { + const cid = new CID(h) + repo.blockstore.get(cid, cb) + }, + (block, cb) => { + expect(block.data).to.be.eql(blockData[j]) + cb() + } + ], cb) }), done) }) it('returns an error on invalid block', (done) => { - pull( - repo.blockstore.getStream(), - pull.onEnd((err) => { - expect(err.message).to.be.eql('Invalid key') - done() - }) - ) + repo.blockstore.get('woot', (err, val) => { + expect(err).to.exist() + expect(val).to.not.exist() + done() + }) }) }) describe('.has', () => { it('existing block', (done) => { - const b = new Block('hello world') - waterfall([ - (cb) => b.key(cb), - (key, cb) => repo.blockstore.has(key, cb) - ], (err, exists) => { - expect(err).to.not.exist - expect(exists).to.equal(true) + repo.blockstore.has(b.cid, (err, exists) => { + expect(err).to.not.exist() + expect(exists).to.eql(true) done() }) }) it('non existent block', (done) => { - const b = new Block('wooot') - - waterfall([ - (cb) => b.key(cb), - (key, cb) => repo.blockstore.has(key, cb) - ], (err, exists) => { - expect(err).to.not.exist - expect(exists).to.equal(false) + repo.blockstore.has(new CID('woot'), (err, exists) => { + expect(err).to.not.exist() + expect(exists).to.eql(false) done() }) }) @@ -181,40 +144,15 @@ module.exports = (repo) => { describe('.delete', () => { it('simple', (done) => { - const b = new Block('hello world') - b.key((err, key) => { - expect(err).to.not.exist - - waterfall([ - (cb) => repo.blockstore.delete(key, cb), - (cb) => repo.blockstore.has(key, cb) - ], (err, exists) => { - expect(err).to.not.exist - expect(exists).to.equal(false) - done() - }) + waterfall([ + (cb) => repo.blockstore.delete(b.cid, cb), + (cb) => 
repo.blockstore.has(b.cid, cb) + ], (err, exists) => { + expect(err).to.not.exist() + expect(exists).to.equal(false) + done() }) }) }) - - describe('interop', () => { - it('reads welcome-to-ipfs', (done) => { - const welcomeHash = mh.fromHexString( - '1220120f6af601d46e10b2d2e11ed71c55d25f3042c22501e41d1246e7a1e9d3d8ec' - ) - pull( - repo.blockstore.getStream(welcomeHash), - pull.collect((err, blocks) => { - expect(err).to.not.exist - expect( - blocks[0].data.toString() - ).to.match( - /Hello and Welcome to IPFS/ - ) - done() - }) - ) - }) - }) }) } diff --git a/test/browser.js b/test/browser.js index 35d3656c..3ecf62ba 100644 --- a/test/browser.js +++ b/test/browser.js @@ -3,54 +3,24 @@ 'use strict' const series = require('async/series') -const Store = require('idb-pull-blob-store') -const _ = require('lodash') -const pull = require('pull-stream') const IPFSRepo = require('../src') -const tests = require('./repo-test') -const repoContext = require.context('buffer!./test-repo', true) +describe('IPFS Repo Tests on the Browser', () => { + const repo = new IPFSRepo('myrepo') -const idb = self.indexedDB || - self.mozIndexedDB || - self.webkitIndexedDB || - self.msIndexedDB - -idb.deleteDatabase('ipfs') -idb.deleteDatabase('ipfs/blocks') - -// TODO use arrow funtions again when https://github.com/webpack/webpack/issues/1944 is fixed -describe('IPFS Repo Tests on the Browser', function () { - before(function (done) { - const repoData = [] - repoContext.keys().forEach(function (key) { - repoData.push({ - key: key.replace('./', ''), - value: repoContext(key) - }) - }) - - const mainBlob = new Store('ipfs') - const blocksBlob = new Store('ipfs/blocks') - - series(repoData.map((file) => (cb) => { - if (_.startsWith(file.key, 'datastore/')) { - return cb() - } - - const blocks = _.startsWith(file.key, 'blocks/') - const blob = blocks ? blocksBlob : mainBlob - - const key = blocks ? 
file.key.replace(/^blocks\//, '') : file.key + before((done) => { + series([ + (cb) => repo.init({}, cb), + (cb) => repo.open(cb) + ], done) + }) - pull( - pull.values([file.value]), - blob.write(key, cb) - ) - }), done) + after((done) => { + repo.close(done) }) - const repo = new IPFSRepo('ipfs', {stores: Store}) - tests(repo) + require('./repo-test')(repo) + require('./blockstore-test')(repo) + require('./datastore-test')(repo) }) diff --git a/test/datastore-test.js b/test/datastore-test.js new file mode 100644 index 00000000..d87d7fe4 --- /dev/null +++ b/test/datastore-test.js @@ -0,0 +1,91 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const parallel = require('async/parallel') +const waterfall = require('async/waterfall') +const each = require('async/each') +const _ = require('lodash') +const Key = require('interface-datastore').Key + +module.exports = (repo) => { + describe('datastore', () => { + const dataList = _.range(100).map((i) => new Buffer(`hello-${i}-${Math.random()}`)) + const data = new Buffer('hello world') + const b = new Key('hello') + + describe('.put', () => { + it('simple', (done) => { + repo.store.put(b, data, done) + }) + + it('multi write (locks)', (done) => { + parallel([ + (cb) => repo.store.put(b, data, cb), + (cb) => repo.store.put(b, data, cb) + ], done) + }) + + it('massive multiwrite', (done) => { + each(_.range(100), (i, cb) => { + repo.store.put(new Key('hello' + i), dataList[i], cb) + }, done) + }) + }) + + describe('.get', () => { + it('simple', (done) => { + repo.store.get(b, (err, val) => { + expect(err).to.not.exist() + expect(val).to.be.eql(data) + done() + }) + }) + + it('massive read', (done) => { + parallel(_.range(20 * 100).map((i) => (cb) => { + const j = i % dataList.length + repo.store.get(new Key('hello' + j), (err, val) => { + expect(err).to.not.exist() + expect(val).to.be.eql(dataList[j]) + cb() + }) + }), done) + }) + }) + + describe('.has', () => { + it('existing entry', (done) => { + repo.store.has(b, (err, exists) => { + expect(err).to.not.exist() + expect(exists).to.eql(true) + done() + }) + }) + + it('non existent block', (done) => { + repo.store.has(new Key('world'), (err, exists) => { + expect(err).to.not.exist() + expect(exists).to.eql(false) + done() + }) + }) + }) + + describe('.delete', () => { + it('simple', (done) => { + waterfall([ + (cb) => repo.store.delete(b, cb), + (cb) => repo.store.has(b, cb) + ], (err, exists) => { + expect(err).to.not.exist() + expect(exists).to.equal(false) + done() + }) + }) + }) + }) +} diff --git a/test/interop-test.js b/test/interop-test.js new file mode 100644 index 00000000..16c8d329 --- /dev/null +++ b/test/interop-test.js @@ -0,0 +1,37 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const mh = require('multihashes') +const CID = require('cids') +const Key = require('interface-datastore').Key + +module.exports = (repo) => { + describe('interop', () => { + it('reads welcome-to-ipfs', (done) => { + const welcomeHash = mh.fromHexString( + '1220120f6af601d46e10b2d2e11ed71c55d25f3042c22501e41d1246e7a1e9d3d8ec' + ) + + repo.blockstore.get(new CID(welcomeHash), (err, val) => { + expect(err).to.not.exist() + expect( + val.data.toString() + ).to.match( + /Hello and Welcome to IPFS/ + ) + done() + }) + }) + + it('reads pin set from the datastore', (done) => { + repo.store.get(new 
Key('/local/pins'), (err, val) => {
+        expect(err).to.not.exist()
+        expect(val).to.have.length(34)
+        done()
+      })
+    })
+  })
+}
diff --git a/test/node.js
index 144eb52d..f775b1d7 100644
--- a/test/node.js
+++ b/test/node.js
@@ -1,34 +1,80 @@
 /* eslint-env mocha */
-
 'use strict'
 
-const expect = require('chai').expect
 const ncp = require('ncp').ncp
 const rimraf = require('rimraf')
 const path = require('path')
-const Store = require('fs-pull-blob-store')
+const series = require('async/series')
+const os = require('os')
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 
 const IPFSRepo = require('../src')
 
 describe('IPFS Repo Tests on Node.js', () => {
-  const testRepoPath = path.join(__dirname, 'test-repo')
-  const date = Date.now().toString()
-  const repoPath = testRepoPath + '-for-' + date
-
-  before((done) => {
-    ncp(testRepoPath, repoPath, (err) => {
-      expect(err).to.not.exist
-      done()
+  const repos = [{
+    name: 'default',
+    opts: undefined,
+    init: false
+  }, {
+    name: 'memory',
+    opts: {
+      fs: require('interface-datastore').MemoryDatastore,
+      level: require('memdown'),
+      lock: 'memory'
+    },
+    init: true
+  }]
+  repos.forEach((r) => describe(r.name, () => {
+    const testRepoPath = path.join(__dirname, 'test-repo')
+    const date = Date.now().toString()
+    const repoPath = testRepoPath + '-for-' + date
+
+    const repo = new IPFSRepo(repoPath, r.opts)
+
+    before((done) => {
+      series([
+        (cb) => {
+          if (r.init) {
+            repo.init({}, cb)
+          } else {
+            ncp(testRepoPath, repoPath, cb)
+          }
+        },
+        (cb) => repo.open(cb)
+      ], done)
     })
-  })
 
-  after((done) => {
-    rimraf(repoPath, (err) => {
-      expect(err).to.not.exist
-      done()
+    after((done) => {
+      series([
+        (cb) => repo.close(cb),
+        (cb) => rimraf(repoPath, cb)
+      ], done)
+    })
+
+    it('init', (done) => {
+      const dir = path.join(os.tmpdir(), String(Math.random()).slice(2))
+      const r = new IPFSRepo(dir)
+
+      series([
+        (cb) => r.init({hello: 'world'}, cb),
+        (cb) => r.open(cb),
+        (cb) => r.config.get((err, val) => {
+          expect(err).to.not.exist()
+          expect(val).to.be.eql({hello: 'world'})
+          cb()
+        }),
+        (cb) => r.close(cb),
+        (cb) => rimraf(dir, cb)
+      ], done)
    })
-  })
 
-  const repo = new IPFSRepo(repoPath, {stores: Store})
-  require('./repo-test')(repo)
+    require('./repo-test')(repo)
+    require('./blockstore-test')(repo)
+    require('./datastore-test')(repo)
+    if (!r.init) {
+      require('./interop-test')(repo)
+    }
+  }))
 })
diff --git a/test/repo-test.js
index f645bf51..4d48c016 100644
--- a/test/repo-test.js
+++ b/test/repo-test.js
@@ -1,30 +1,26 @@
 /* eslint-env mocha */
 'use strict'
 
-const expect = require('chai').expect
+const chai = require('chai')
+chai.use(require('dirty-chai'))
+const expect = chai.expect
 const series = require('async/series')
 
-const Repo = require('../src/index')
+const Repo = require('../src')
 
 module.exports = (repo) => {
   describe('IPFS Repo Tests', () => {
-    describe('init', () => {
-      it('bad repo init 1', () => {
+    describe('new', () => {
+      it('missing arguments', () => {
         expect(
           () => new Repo()
         ).to.throw(Error)
       })
-
-      it('bad repo init 2', () => {
-        expect(
-          () => new Repo('', {})
-        ).to.throw(Error)
-      })
     })
 
     it('check if Repo exists', (done) => {
      repo.exists((err, exists) => {
-        expect(err).to.not.exist
+        expect(err).to.not.exist()
         expect(exists).to.equal(true)
         done()
      })
    })
@@ -34,43 +30,10 @@ module.exports = (repo) => {
     expect(repo.path).to.be.a('string')
   })
 
-    describe('locks', () => {
-      it('lock, unlock', (done) => {
-        series([
-          (cb) => repo.locks.lock(cb),
-          (cb) =>
repo.locks.unlock(cb) - ], done) - }) - - it('lock, lock', (done) => { - series([ - (cb) => repo.locks.lock(cb), - (cb) => repo.locks.lock(cb), - (cb) => repo.locks.unlock(cb) - ], done) - - setTimeout(() => { - repo.locks.unlock((err) => { - expect(err).to.not.exist - }) - }, 500) - }) - }) - - describe('keys', () => { - it('get PrivKey', (done) => { - repo.keys.get((err, privKey) => { - expect(err).to.not.exist - expect(privKey).to.be.a('string') - done() - }) - }) - }) - describe('config', () => { it('get config', (done) => { repo.config.get((err, config) => { - expect(err).to.not.exist + expect(err).to.not.exist() expect(config).to.be.a('object') done() }) @@ -91,27 +54,22 @@ module.exports = (repo) => { describe('version', () => { it('get version', (done) => { repo.version.get((err, version) => { - expect(err).to.not.exist - expect(version).to.be.a('string') - expect(Number(version)).to.be.a('number') + expect(err).to.not.exist() + expect(version).to.be.eql(5) done() }) }) it('set version', (done) => { - repo.version.set('9000', (err) => { - expect(err).to.not.exist + repo.version.set(9000, (err) => { + expect(err).to.not.exist() repo.version.get((err, version) => { - expect(err).to.not.exist - expect(version).to.equal('9000') + expect(err).to.not.exist() + expect(version).to.equal(9000) done() }) }) }) }) - - require('./blockstore-test')(repo) - - describe('datastore', () => {}) }) } diff --git a/test/test-repo/blocks/CIQEU/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data b/test/test-repo/blocks/2F/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data similarity index 100% rename from test/test-repo/blocks/CIQEU/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data rename to test/test-repo/blocks/2F/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data diff --git a/test/test-repo/blocks/CIQFF/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data b/test/test-repo/blocks/5V/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data similarity index 100% rename from test/test-repo/blocks/CIQFF/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data rename to test/test-repo/blocks/5V/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data diff --git a/test/test-repo/blocks/CIQMB/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data b/test/test-repo/blocks/75/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data similarity index 100% rename from test/test-repo/blocks/CIQMB/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data rename to test/test-repo/blocks/75/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data diff --git a/test/test-repo/blocks/CIQKK/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data b/test/test-repo/blocks/7J/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data similarity index 100% rename from test/test-repo/blocks/CIQKK/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data rename to test/test-repo/blocks/7J/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data diff --git a/test/test-repo/blocks/CIQON/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data b/test/test-repo/blocks/AE/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data similarity index 100% rename from test/test-repo/blocks/CIQON/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data rename to test/test-repo/blocks/AE/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data diff --git 
a/test/test-repo/blocks/CIQHA/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data b/test/test-repo/blocks/AP/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data similarity index 100% rename from test/test-repo/blocks/CIQHA/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data rename to test/test-repo/blocks/AP/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data diff --git a/test/test-repo/blocks/CIQDD/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data b/test/test-repo/blocks/C4/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data similarity index 100% rename from test/test-repo/blocks/CIQDD/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data rename to test/test-repo/blocks/C4/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data diff --git a/test/test-repo/blocks/CIQDM/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data b/test/test-repo/blocks/CY/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data similarity index 100% rename from test/test-repo/blocks/CIQDM/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data rename to test/test-repo/blocks/CY/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data diff --git a/test/test-repo/blocks/CIQIX/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data b/test/test-repo/blocks/FN/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data similarity index 100% rename from test/test-repo/blocks/CIQIX/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data rename to test/test-repo/blocks/FN/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data diff --git a/test/test-repo/blocks/CIQH7/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data b/test/test-repo/blocks/GQ/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data similarity index 100% rename from test/test-repo/blocks/CIQH7/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data rename to test/test-repo/blocks/GQ/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data diff --git a/test/test-repo/blocks/CIQDD/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data b/test/test-repo/blocks/HD/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data similarity index 100% rename from test/test-repo/blocks/CIQDD/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data rename to test/test-repo/blocks/HD/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data diff --git a/test/test-repo/blocks/CIQJF/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data b/test/test-repo/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data similarity index 100% rename from test/test-repo/blocks/CIQJF/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data rename to test/test-repo/blocks/IL/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data diff --git a/test/test-repo/blocks/CIQJB/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data b/test/test-repo/blocks/LG/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data similarity index 100% rename from test/test-repo/blocks/CIQJB/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data rename to test/test-repo/blocks/LG/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data diff --git a/test/test-repo/blocks/CIQOY/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data b/test/test-repo/blocks/O6/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data similarity index 100% rename from 
test/test-repo/blocks/CIQOY/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data rename to test/test-repo/blocks/O6/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data diff --git a/test/test-repo/blocks/CIQGP/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data b/test/test-repo/blocks/QF/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data similarity index 100% rename from test/test-repo/blocks/CIQGP/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data rename to test/test-repo/blocks/QF/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data diff --git a/test/test-repo/blocks/CIQOH/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data b/test/test-repo/blocks/QV/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data similarity index 100% rename from test/test-repo/blocks/CIQOH/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data rename to test/test-repo/blocks/QV/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data diff --git a/test/test-repo/blocks/CIQBE/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data b/test/test-repo/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data similarity index 100% rename from test/test-repo/blocks/CIQBE/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data rename to test/test-repo/blocks/R3/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data diff --git a/test/test-repo/blocks/CIQHB/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data b/test/test-repo/blocks/S5/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data similarity index 100% rename from test/test-repo/blocks/CIQHB/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data rename to test/test-repo/blocks/S5/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data diff --git a/test/test-repo/blocks/SHARDING b/test/test-repo/blocks/SHARDING new file mode 100644 index 00000000..a153331d --- /dev/null +++ b/test/test-repo/blocks/SHARDING @@ -0,0 +1 @@ +/repo/flatfs/shard/v1/next-to-last/2 diff --git a/test/test-repo/blocks/CIQFE/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data b/test/test-repo/blocks/TW/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data similarity index 100% rename from test/test-repo/blocks/CIQFE/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data rename to test/test-repo/blocks/TW/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data diff --git a/test/test-repo/blocks/CIQOM/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data b/test/test-repo/blocks/UN/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data similarity index 100% rename from test/test-repo/blocks/CIQOM/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data rename to test/test-repo/blocks/UN/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data diff --git a/test/test-repo/blocks/CIQGF/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data b/test/test-repo/blocks/VO/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data similarity index 100% rename from test/test-repo/blocks/CIQGF/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data rename to test/test-repo/blocks/VO/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data diff --git a/test/test-repo/blocks/CIQFT/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data b/test/test-repo/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data similarity index 100% rename from 
test/test-repo/blocks/CIQFT/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data rename to test/test-repo/blocks/X3/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data diff --git a/test/test-repo/blocks/CIQJG/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data b/test/test-repo/blocks/XO/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data similarity index 100% rename from test/test-repo/blocks/CIQJG/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data rename to test/test-repo/blocks/XO/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data diff --git a/test/test-repo/blocks/_README b/test/test-repo/blocks/_README new file mode 100644 index 00000000..23cb0909 --- /dev/null +++ b/test/test-repo/blocks/_README @@ -0,0 +1,30 @@ +This is a repository of IPLD objects. Each IPLD object is in a single file, +named <base32 encoding of cid>.data, where <base32 encoding of cid> is the +"base32" encoding of the CID (as specified in +https://github.com/multiformats/multibase) without the 'B' prefix. +All the object files are placed in a tree of directories, based on a +function of the CID. This is a form of sharding similar to +the objects directory in git repositories. Previously we used +prefixes; we now use the next-to-last two characters. + + func NextToLast(base32cid string) string { + nextToLastLen := 2 + offset := len(base32cid) - nextToLastLen - 1 + return base32cid[offset : offset+nextToLastLen] + } + +For example, an object with a base58 CIDv1 of + + zb2rhYSxw4ZjuzgCnWSt19Q94ERaeFhu9uSqRgjSdx9bsgM6f + +has a base32 CIDv1 of + + BAFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA + +and will be placed at + + SC/AFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA.data + +with 'SC' being the next-to-last two characters; the 'B' at the +beginning of the CIDv1 string is the multibase prefix and is not +stored in the filename. diff --git a/test/test-repo/version b/test/test-repo/version index b8626c4c..7ed6ff82 100644 --- a/test/test-repo/version +++ b/test/test-repo/version @@ -1 +1 @@ -4 +5
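For illustration only (not part of the patch): a minimal JavaScript sketch of the next-to-last sharding rule that the new blocks/_README and blocks/SHARDING files describe. The function name `shardPath` is assumed for this example.

```js
// Sketch of /repo/flatfs/shard/v1/next-to-last/2: place each object file in a
// directory named after the next-to-last two characters of its base32 CID
// (the '.data' extension is appended as in the repo layout described above).
function shardPath (base32cid) {
  const nextToLastLen = 2
  // skip the final character, then take the two characters before it
  const offset = base32cid.length - nextToLastLen - 1
  const dir = base32cid.slice(offset, offset + nextToLastLen)
  return dir + '/' + base32cid + '.data'
}

// Reproduces the worked example from _README:
// shardPath('AFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA')
// => 'SC/AFKREIA22FLID5AJ2KU7URG47MDLROZIH6YF2KALU2PWEFPVI37YLKRSCA.data'
```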
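Likewise for illustration: the rewritten test/node.js above exercises the new repo lifecycle (init, open, use, close) introduced by this migration. A minimal usage sketch mirroring that test, assuming the package is required as `ipfs-repo` and the callback-style API shown in the diff:

```js
const series = require('async/series')
const IPFSRepo = require('ipfs-repo')

const repo = new IPFSRepo('/tmp/example-repo')

series([
  (cb) => repo.init({ hello: 'world' }, cb), // write initial config and version
  (cb) => repo.open(cb),                     // acquire the repo lock
  (cb) => repo.config.get((err, config) => {
    if (err) return cb(err)
    console.log(config)                      // => { hello: 'world' }
    cb()
  }),
  (cb) => repo.close(cb)                     // release the lock
], (err) => {
  if (err) throw err
})
```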