This repository has been archived by the owner on Oct 1, 2021. It is now read-only.

chore: upgrade to new multiformats module #98

Merged: 14 commits, Jul 7, 2021
.aegir.js (2 additions, 12 deletions)
@@ -4,17 +4,7 @@ const path = require('path')

 const esbuild = {
   // this will inject all the named exports from 'node-globals.js' as globals
-  inject: [path.join(__dirname, 'scripts/node-globals.js')],
-  plugins: [
-    {
-      name: 'node built ins', // this will make the bundler resolve node builtins to the respective browser polyfill
-      setup (build) {
-        build.onResolve({ filter: /^stream$/ }, () => {
-          return { path: require.resolve('readable-stream') }
-        })
-      }
-    }
-  ]
+  inject: [path.join(__dirname, 'scripts/node-globals.js')]
 }
 
 /** @type {import('aegir').PartialOptions} */
@@ -27,6 +17,6 @@ module.exports = {
     }
   },
   build: {
-    config: esbuild
+    bundlesizeMax: '37kB'
   }
 }
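
For reference, a sketch of how .aegir.js might read after this change, assembled from the two hunks above. The test configuration in the middle of the file is not shown in the diff and is elided here; it is assumed to still reference the esbuild object:

const path = require('path')

const esbuild = {
  // this will inject all the named exports from 'node-globals.js' as globals
  inject: [path.join(__dirname, 'scripts/node-globals.js')]
}

/** @type {import('aegir').PartialOptions} */
module.exports = {
  // ...test configuration elided, assumed unchanged and still using `esbuild`...
  build: {
    bundlesizeMax: '37kB'
  }
}

Presumably the readable-stream polyfill was only needed by the old CID/multibase dependency chain; with the new multiformats module the only esbuild customisation left is the node-globals injection.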
migrations/migration-10/index.js (28 additions, 14 deletions)
@@ -1,7 +1,6 @@
 'use strict'
 
 const {
-  createStore,
   findLevelJs
 } = require('../../src/utils')
 const fromString = require('uint8arrays/from-string')
@@ -10,6 +9,7 @@ const toString = require('uint8arrays/to-string')
 /**
  * @typedef {import('../../src/types').Migration} Migration
  * @typedef {import('interface-datastore').Datastore} Datastore
+ * @typedef {import('interface-blockstore').Blockstore} Blockstore
  * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback
  *
  * @typedef {{ type: 'del', key: string | Uint8Array } | { type: 'put', key: string | Uint8Array, value: Uint8Array }} Operation
@@ -78,18 +78,32 @@ async function keysToStrings (name, store, onProgress = () => {}) {
 }
 
 /**
- *
- * @param {string} repoPath
- * @param {any} repoOptions
+ * @param {any} store
+ * @returns {Datastore}
+ */
+function unwrap (store) {
+  if (store.child) {
+    return unwrap(store.child)
+  }
+
+  return store
+}
+
+/**
+ * @param {import('../../src/types').Backends} backends
  * @param {MigrationProgressCallback} onProgress
  * @param {*} fn
  */
-async function process (repoPath, repoOptions, onProgress, fn) {
-  const datastores = Object.keys(repoOptions.storageBackends)
-    .filter(key => repoOptions.storageBackends[key].name === 'LevelDatastore')
-    .map(name => ({
-      name,
-      store: createStore(repoPath, name, repoOptions)
+async function process (backends, onProgress, fn) {
+  /**
+   * @type {{ name: string, store: Datastore }[]}
+   */
+  const datastores = Object.entries(backends)
+    .map(([key, backend]) => ({ key, backend: unwrap(backend) }))
+    .filter(({ key, backend }) => backend.constructor.name === 'LevelDatastore')
+    .map(({ key, backend }) => ({
+      name: key,
+      store: backend
     }))
 
   onProgress(0, `Migrating ${datastores.length} dbs`)
@@ -120,11 +134,11 @@ async function process (repoPath, repoOptions, onProgress, fn) {
 module.exports = {
   version: 10,
   description: 'Migrates datastore-level keys to binary',
-  migrate: (repoPath, repoOptions, onProgress = () => {}) => {
-    return process(repoPath, repoOptions, onProgress, keysToBinary)
+  migrate: (backends, onProgress = () => {}) => {
+    return process(backends, onProgress, keysToBinary)
   },
-  revert: (repoPath, repoOptions, onProgress = () => {}) => {
-    return process(repoPath, repoOptions, onProgress, keysToStrings)
+  revert: (backends, onProgress = () => {}) => {
+    return process(backends, onProgress, keysToStrings)
   }
 }
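
To make the new calling convention concrete, here is a hypothetical usage sketch (not part of this PR): instead of a repo path plus options, callers now hand the migration an object of already-constructed backend datastores. The backend names and the use of datastore-level below are illustrative assumptions; the diff only requires that LevelDatastore-backed entries are present for them to be migrated.

// Hypothetical example of driving migration-10 with the new (backends, onProgress) signature
const LevelDatastore = require('datastore-level')
const migration10 = require('./migrations/migration-10')

async function main () {
  // Assumed backend layout - only entries whose constructor is LevelDatastore are picked up by `process`
  const backends = {
    root: new LevelDatastore('/tmp/repo'),
    datastore: new LevelDatastore('/tmp/repo/datastore'),
    keys: new LevelDatastore('/tmp/repo/keys'),
    pins: new LevelDatastore('/tmp/repo/pins')
  }

  await migration10.migrate(backends, (percent, message) => {
    console.log(`${percent}% - ${message}`)
  })
}

main().catch(console.error)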

migrations/migration-8/index.js (53 additions, 30 deletions)
@@ -1,60 +1,82 @@
 'use strict'
 
-const CID = require('cids')
+const { CID } = require('multiformats/cid')
 const Key = require('interface-datastore').Key
-const mb = require('multibase')
 const log = require('debug')('ipfs:repo:migrator:migration-8')
-const uint8ArrayToString = require('uint8arrays/to-string')
-const { createStore } = require('../../src/utils')
 
 const length = require('it-length')
+const { base32 } = require('multiformats/bases/base32')
+const raw = require('multiformats/codecs/raw')
+const mhd = require('multiformats/hashes/digest')
 
 /**
  * @typedef {import('../../src/types').Migration} Migration
+ * @typedef {import('interface-datastore').Datastore} Datastore
  */
 
 /**
+ * @param {*} blockstore
+ * @returns {Datastore}
+ */
+function unwrap (blockstore) {
+  if (blockstore.child) {
+    return unwrap(blockstore.child)
+  }
+
+  return blockstore
+}
+
+/**
  * @param {Key} key
  */
 function keyToMultihash (key) {
-  const buf = mb.decode(`b${key.toString().slice(1)}`)
-
-  // Extract multihash from CID
-  let multihash = new CID(buf).multihash
+  try {
+    const buf = base32.decode(`b${key.toString().toLowerCase().slice(1)}`)
 
-  // Encode and slice off multibase codec
-  multihash = mb.encode('base32', multihash).slice(1)
+    // Extract multihash from CID
+    let multihash = CID.decode(buf).multihash.bytes
 
-  // Should be uppercase for interop with go
-  const multihashStr = uint8ArrayToString(multihash).toUpperCase()
+    // Encode and slice off multibase codec
+    // Should be uppercase for interop with go
+    const multihashStr = base32.encode(multihash).slice(1).toUpperCase()
 
-  return new Key(`/${multihashStr}`, false)
+    return new Key(`/${multihashStr}`, false)
+  } catch (err) {
+    return key
+  }
 }
 
 /**
  * @param {Key} key
  */
 function keyToCid (key) {
-  const buf = mb.decode(`b${key.toString().slice(1)}`)
+  try {
+    const buf = base32.decode(`b${key.toString().toLowerCase().slice(1)}`)
+    const digest = mhd.decode(buf)
 
-  // CID to Key
-  const multihash = mb.encode('base32', new CID(1, 'raw', buf).bytes).slice(1)
+    // CID to Key
+    const multihash = base32.encode(CID.createV1(raw.code, digest).bytes).slice(1)
 
-  return new Key(`/${uint8ArrayToString(multihash)}`.toUpperCase(), false)
+    return new Key(`/${multihash.toUpperCase()}`, false)
+  } catch {
+    return key
+  }
 }
 
 /**
- * @param {string} repoPath
- * @param {*} repoOptions
+ * @param {import('../../src/types').Backends} backends
  * @param {(percent: number, message: string) => void} onProgress
  * @param {(key: Key) => Key} keyFunction
  */
-async function process (repoPath, repoOptions, onProgress, keyFunction) {
-  const blockstore = createStore(repoPath, 'blocks', repoOptions)
+async function process (backends, onProgress, keyFunction) {
+  const blockstore = backends.blocks
   await blockstore.open()
 
+  const unwrapped = unwrap(blockstore)
+
   let blockCount
 
-  blockCount = await length(blockstore.queryKeys({
+  blockCount = await length(unwrapped.queryKeys({
     filters: [(key) => {
       const newKey = keyFunction(key)
 
@@ -65,15 +87,16 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) {
   try {
     let counter = 0
 
-    for await (const block of blockstore.query({})) {
+    for await (const block of unwrapped.query({})) {
       const newKey = keyFunction(block.key)
 
       // If the Key is base32 CIDv0 then there's nothing to do
       if(newKey.toString() !== block.key.toString()) {
         counter += 1
-        log(`Migrating Block from ${block.key} to ${newKey}`)
-        await blockstore.delete(block.key)
-        await blockstore.put(newKey, block.value)
+        log(`Migrating Block from ${block.key} to ${newKey}`, await unwrapped.has(block.key))
+
+        await unwrapped.delete(block.key)
+        await unwrapped.put(newKey, block.value)
 
         onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`)
       }
@@ -87,10 +110,10 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) {
 module.exports = {
   version: 8,
   description: 'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32',
-  migrate: (repoPath, repoOptions, onProgress = () => {}) => {
-    return process(repoPath, repoOptions, onProgress, keyToMultihash)
+  migrate: (backends, onProgress = () => {}) => {
+    return process(backends, onProgress, keyToMultihash)
   },
-  revert: (repoPath, repoOptions, onProgress = () => {}) => {
-    return process(repoPath, repoOptions, onProgress, keyToCid)
+  revert: (backends, onProgress = () => {}) => {
+    return process(backends, onProgress, keyToCid)
  }
 }
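
As a sanity check on the key handling above, the following standalone sketch (not from the PR) builds a raw CIDv1, derives an old-style blockstore key from it, and applies the same multiformats calls that keyToMultihash uses to produce the bare-multihash key. The example bytes are arbitrary, and the snippet only assumes the multiformats and interface-datastore packages that this migration already depends on.

const { CID } = require('multiformats/cid')
const { base32 } = require('multiformats/bases/base32')
const raw = require('multiformats/codecs/raw')
const { sha256 } = require('multiformats/hashes/sha2')
const { Key } = require('interface-datastore')

async function main () {
  // A raw-codec CIDv1 over some example bytes
  const digest = await sha256.digest(new TextEncoder().encode('hello world'))
  const cid = CID.createV1(raw.code, digest)

  // Old-style key: uppercase base32 CID without the multibase prefix
  const cidKey = new Key(`/${cid.toString().slice(1).toUpperCase()}`, false)

  // Same steps as keyToMultihash: decode the CID, keep only the multihash bytes,
  // re-encode as base32 and uppercase for interop with go
  const buf = base32.decode(`b${cidKey.toString().toLowerCase().slice(1)}`)
  const multihash = CID.decode(buf).multihash.bytes
  const mhKey = new Key(`/${base32.encode(multihash).slice(1).toUpperCase()}`, false)

  console.log('CID key:      ', cidKey.toString())
  console.log('multihash key:', mhKey.toString())
}

main().catch(console.error)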