From 05794f5cb35eb322965d33a045ab68dffc63b21a Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Fri, 29 Jun 2018 22:39:28 +0530 Subject: [PATCH 01/40] indexer: add module indexer module indexer introduces a extensible architecture for indexing the chain. It provides a base class which handles syncing with the chain, handling re-orgs, interruptions, dynamic toggling, etc. TXIndexer and AddrIndexer are provided for indexing transactions and addresses, using the same flags as before i.e --index-tx and --index-address. Indexes are stored in a different database and can be maintained independently of the chain. --- lib/bcoin-browser.js | 7 + lib/bcoin.js | 7 + lib/blockchain/chain.js | 16 +- lib/blockchain/chaindb.js | 278 +++---------- lib/blockchain/layout.js | 6 +- lib/indexer/addrindexer.js | 197 +++++++++ lib/indexer/chainclient.js | 200 +++++++++ lib/indexer/index.js | 16 + lib/indexer/indexer.js | 812 +++++++++++++++++++++++++++++++++++++ lib/indexer/layout.js | 31 ++ lib/indexer/nullclient.js | 142 +++++++ lib/indexer/records.js | 221 ++++++++++ lib/indexer/txindexer.js | 151 +++++++ lib/node/fullnode.js | 84 +++- lib/node/node.js | 2 + lib/node/rpc.js | 4 +- test/indexer-test.js | 129 ++++++ test/node-test.js | 51 +++ test/util/reorg.js | 63 +++ 19 files changed, 2171 insertions(+), 246 deletions(-) create mode 100644 lib/indexer/addrindexer.js create mode 100644 lib/indexer/chainclient.js create mode 100644 lib/indexer/index.js create mode 100644 lib/indexer/indexer.js create mode 100644 lib/indexer/layout.js create mode 100644 lib/indexer/nullclient.js create mode 100644 lib/indexer/records.js create mode 100644 lib/indexer/txindexer.js create mode 100644 test/indexer-test.js create mode 100644 test/util/reorg.js diff --git a/lib/bcoin-browser.js b/lib/bcoin-browser.js index 9240c28d7..711818adf 100644 --- a/lib/bcoin-browser.js +++ b/lib/bcoin-browser.js @@ -55,6 +55,13 @@ bcoin.HDPrivateKey = require('./hd/private'); bcoin.HDPublicKey = 
require('./hd/public'); bcoin.Mnemonic = require('./hd/mnemonic'); +// Index +bcoin.indexer = require('./indexer'); +bcoin.Indexer = require('./indexer/indexer'); +bcoin.ChainClient = require('./indexer/chainclient'); +bcoin.TXIndexer = require('./indexer/txindexer'); +bcoin.AddrIndexer = require('./indexer/addrindexer'); + // Mempool bcoin.mempool = require('./mempool'); bcoin.Fees = require('./mempool/fees'); diff --git a/lib/bcoin.js b/lib/bcoin.js index 72ab240b3..8bff04423 100644 --- a/lib/bcoin.js +++ b/lib/bcoin.js @@ -76,6 +76,13 @@ bcoin.define('HDPrivateKey', './hd/private'); bcoin.define('HDPublicKey', './hd/public'); bcoin.define('Mnemonic', './hd/mnemonic'); +// Index +bcoin.define('indexer', './indexer'); +bcoin.define('Indexer', './indexer/indexer'); +bcoin.define('ChainClient', './indexer/chainclient'); +bcoin.define('TXIndexer', './indexer/txindexer'); +bcoin.define('AddrIndexer', './indexer/addrindexer'); + // Mempool bcoin.define('mempool', './mempool'); bcoin.define('Fees', './mempool/fees'); diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js index 026acb9a2..19cfea935 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -2059,14 +2059,14 @@ class Chain extends AsyncEmitter { /** * Get coin viewpoint (spent). - * @param {TX} tx + * @param {TXMeta} meta * @returns {Promise} - Returns {@link CoinView}. 
*/ - async getSpentView(tx) { + async getSpentView(meta) { const unlock = await this.locker.lock(); try { - return await this.db.getSpentView(tx); + return await this.db.getSpentView(meta); } finally { unlock(); } @@ -2766,11 +2766,6 @@ class ChainOptions { this.compression = options.compression; } - if (options.prune != null) { - assert(typeof options.prune === 'boolean'); - this.prune = options.prune; - } - if (options.indexTX != null) { assert(typeof options.indexTX === 'boolean'); this.indexTX = options.indexTX; @@ -2781,6 +2776,11 @@ class ChainOptions { this.indexAddress = options.indexAddress; } + if (options.prune != null) { + assert(typeof options.prune === 'boolean'); + this.prune = options.prune; + } + if (options.forceFlags != null) { assert(typeof options.forceFlags === 'boolean'); this.forceFlags = options.forceFlags; diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index f24e6f975..9f7a779e8 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -11,7 +11,7 @@ const assert = require('bsert'); const bdb = require('bdb'); const bio = require('bufio'); const LRU = require('blru'); -const {BufferMap, BufferSet} = require('buffer-map'); +const {BufferMap} = require('buffer-map'); const Amount = require('../btc/amount'); const Network = require('../protocol/network'); const CoinView = require('../coins/coinview'); @@ -20,9 +20,7 @@ const layout = require('./layout'); const consensus = require('../protocol/consensus'); const Block = require('../primitives/block'); const Outpoint = require('../primitives/outpoint'); -const Address = require('../primitives/address'); const ChainEntry = require('./chainentry'); -const TXMeta = require('../primitives/txmeta'); const CoinEntry = require('../coins/coinentry'); /** @@ -573,18 +571,12 @@ class ChainDB { if (!options.prune && flags.prune) throw new Error('Cannot retroactively unprune.'); - if (options.indexTX && !flags.indexTX) + if (options.prune && options.indexTX && 
!flags.indexTX) throw new Error('Cannot retroactively enable TX indexing.'); - if (!options.indexTX && flags.indexTX) - throw new Error('Cannot retroactively disable TX indexing.'); - - if (options.indexAddress && !flags.indexAddress) + if (options.prune && options.indexAddress && !flags.indexAddress) throw new Error('Cannot retroactively enable address indexing.'); - if (!options.indexAddress && flags.indexAddress) - throw new Error('Cannot retroactively disable address indexing.'); - if (needsSave) { await this.logger.info('Rewriting chain flags.'); await this.saveFlags(); @@ -978,30 +970,16 @@ class ChainDB { /** * Get coin viewpoint (historical). - * @param {TX} tx + * @param {TXMeta} meta * @returns {Promise} - Returns {@link CoinView}. */ - async getSpentView(tx) { - const view = await this.getCoinView(tx); - - for (const {prevout} of tx.inputs) { - if (view.hasEntry(prevout)) - continue; - - const {hash, index} = prevout; - const meta = await this.getMeta(hash); - - if (!meta) - continue; - - const {tx, height} = meta; - - if (index < tx.outputs.length) - view.addIndex(tx, index, height); - } - - return view; + async getSpentView(meta) { + process.emitWarning( + 'deprecated, use node.txindex.getSpentView', + 'DeprecationWarning' + ); + return null; } /** @@ -1083,152 +1061,105 @@ class ChainDB { /** * Get a transaction with metadata. * @param {Hash} hash + * @deprecated * @returns {Promise} - Returns {@link TXMeta}. */ async getMeta(hash) { - if (!this.options.indexTX) - return null; - - const data = await this.db.get(layout.t.encode(hash)); - - if (!data) - return null; - - return TXMeta.fromRaw(data); + process.emitWarning( + 'deprecated, use node.txindex.getMeta', + 'DeprecationWarning' + ); + return null; } /** * Retrieve a transaction. * @param {Hash} hash + * @deprecated * @returns {Promise} - Returns {@link TX}. 
*/ async getTX(hash) { - const meta = await this.getMeta(hash); - - if (!meta) - return null; - - return meta.tx; + process.emitWarning( + 'deprecated, use node.txindex.getTX', + 'DeprecationWarning' + ); + return null; } /** * @param {Hash} hash + * @deprecated * @returns {Promise} - Returns Boolean. */ async hasTX(hash) { - if (!this.options.indexTX) - return false; - - return this.db.has(layout.t.encode(hash)); + process.emitWarning( + 'deprecated, use node.txindex.hasTX', + 'DeprecationWarning' + ); + return false; } /** * Get all coins pertinent to an address. * @param {Address[]} addrs + * @deprecated * @returns {Promise} - Returns {@link Coin}[]. */ async getCoinsByAddress(addrs) { - if (!this.options.indexAddress) - return []; - - if (!Array.isArray(addrs)) - addrs = [addrs]; - - const coins = []; - - for (const addr of addrs) { - const hash = Address.getHash(addr); - - const keys = await this.db.keys({ - gte: layout.C.min(hash), - lte: layout.C.max(hash), - parse: (key) => { - const [, txid, index] = layout.C.decode(key); - return [txid, index]; - } - }); - - for (const [hash, index] of keys) { - const coin = await this.getCoin(hash, index); - assert(coin); - coins.push(coin); - } - } - - return coins; + process.emitWarning( + 'deprecated, use node.addrindex.getCoinsByAddress', + 'DeprecationWarning' + ); + return []; } /** * Get all transaction hashes to an address. * @param {Address[]} addrs + * @deprecated * @returns {Promise} - Returns {@link Hash}[]. 
*/ async getHashesByAddress(addrs) { - if (!this.options.indexTX || !this.options.indexAddress) - return []; - - const set = new BufferSet(); - - for (const addr of addrs) { - const hash = Address.getHash(addr); - - await this.db.keys({ - gte: layout.T.min(hash), - lte: layout.T.max(hash), - parse: (key) => { - const [, txid] = layout.T.decode(key); - set.add(txid); - } - }); - } - - return set.toArray(); + process.emitWarning( + 'deprecated, use node.addrindex.getHashesByAddress', + 'DeprecationWarning' + ); + return []; } /** * Get all transactions pertinent to an address. * @param {Address[]} addrs + * @deprecated * @returns {Promise} - Returns {@link TX}[]. */ async getTXByAddress(addrs) { - const mtxs = await this.getMetaByAddress(addrs); - const out = []; - - for (const mtx of mtxs) - out.push(mtx.tx); - - return out; + process.emitWarning( + 'deprecated, use node.addrindex.getHashesByAddress', + 'DeprecationWarning' + ); + return []; } /** * Get all transactions pertinent to an address. * @param {Address[]} addrs + * @deprecated * @returns {Promise} - Returns {@link TXMeta}[]. */ async getMetaByAddress(addrs) { - if (!this.options.indexTX || !this.options.indexAddress) - return []; - - if (!Array.isArray(addrs)) - addrs = [addrs]; - - const hashes = await this.getHashesByAddress(addrs); - const mtxs = []; - - for (const hash of hashes) { - const mtx = await this.getMeta(hash); - assert(mtx); - mtxs.push(mtx); - } - - return mtxs; + process.emitWarning( + 'deprecated, use node.addrindex.getMetaByAddress', + 'DeprecationWarning' + ); + return []; } /** @@ -1771,9 +1702,6 @@ class ChainDB { this.pending.add(output); } - - // Index the transaction if enabled. - this.indexTX(tx, view, entry, i); } // Commit new coin state. @@ -1828,9 +1756,6 @@ class ChainDB { this.pending.spend(output); } - - // Remove from transaction index. - this.unindexTX(tx, view); } // Undo coins should be empty. 
@@ -1882,105 +1807,6 @@ class ChainDB { b.put(layout.O.encode(), flags.toRaw()); return b.write(); } - - /** - * Index a transaction by txid and address. - * @private - * @param {TX} tx - * @param {CoinView} view - * @param {ChainEntry} entry - * @param {Number} index - */ - - indexTX(tx, view, entry, index) { - const hash = tx.hash(); - - if (this.options.indexTX) { - const meta = TXMeta.fromTX(tx, entry, index); - - this.put(layout.t.encode(hash), meta.toRaw()); - - if (this.options.indexAddress) { - for (const addr of tx.getHashes(view)) - this.put(layout.T.encode(addr, hash), null); - } - } - - if (!this.options.indexAddress) - return; - - if (!tx.isCoinbase()) { - for (const {prevout} of tx.inputs) { - const {hash, index} = prevout; - const coin = view.getOutput(prevout); - assert(coin); - - const addr = coin.getHash(); - - if (!addr) - continue; - - this.del(layout.C.encode(addr, hash, index)); - } - } - - for (let i = 0; i < tx.outputs.length; i++) { - const output = tx.outputs[i]; - const addr = output.getHash(); - - if (!addr) - continue; - - this.put(layout.C.encode(addr, hash, i), null); - } - } - - /** - * Remove transaction from index. 
- * @private - * @param {TX} tx - * @param {CoinView} view - */ - - unindexTX(tx, view) { - const hash = tx.hash(); - - if (this.options.indexTX) { - this.del(layout.t.encode(hash)); - if (this.options.indexAddress) { - for (const addr of tx.getHashes(view)) - this.del(layout.T.encode(addr, hash)); - } - } - - if (!this.options.indexAddress) - return; - - if (!tx.isCoinbase()) { - for (const {prevout} of tx.inputs) { - const {hash, index} = prevout; - const coin = view.getOutput(prevout); - assert(coin); - - const addr = coin.getHash(); - - if (!addr) - continue; - - this.put(layout.C.encode(addr, hash, index), null); - } - } - - for (let i = 0; i < tx.outputs.length; i++) { - const output = tx.outputs[i]; - const addr = output.getHash(); - - if (!addr) - continue; - - this.del(layout.C.encode(addr, hash, i)); - } - } } /** diff --git a/lib/blockchain/layout.js b/lib/blockchain/layout.js index 01aaa0864..337f95900 100644 --- a/lib/blockchain/layout.js +++ b/lib/blockchain/layout.js @@ -20,12 +20,12 @@ const bdb = require('bdb'); * n[hash] -> next hash * p[hash] -> tip index * b[hash] -> block (deprecated) - * t[hash] -> extended tx + * t[hash] -> extended tx (deprecated) * c[hash] -> coins * u[hash] -> undo coins (deprecated) * v[bit][hash] -> versionbits state - * T[addr-hash][hash] -> dummy (tx by address) - * C[addr-hash][hash][index] -> dummy (coin by address) + * T[addr-hash][hash] -> dummy (tx by address) (deprecated) + * C[addr-hash][hash][index] -> dummy (coin by address) (deprecated) */ const layout = { diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js new file mode 100644 index 000000000..6d274328c --- /dev/null +++ b/lib/indexer/addrindexer.js @@ -0,0 +1,197 @@ +/*! + * addrindexer.js - addr indexer + * Copyright (c) 2018, the bcoin developers (MIT License). 
+ * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('assert'); +const bdb = require('bdb'); +const {BufferSet} = require('buffer-map'); +const layout = require('./layout'); +const Address = require('../primitives/address'); +const Indexer = require('./indexer'); + +/* + * AddrIndexer Database Layout: + * T[addr-hash][hash] -> dummy (tx by address) + * C[addr-hash][hash][index] -> dummy (coin by address) +*/ + +Object.assign(layout, { + T: bdb.key('T', ['hash', 'hash256']), + C: bdb.key('C', ['hash', 'hash256', 'uint32']) +}); + +/** + * AddrIndexer + * @alias module:indexer.AddrIndexer + * @extends Indexer + */ + +class AddrIndexer extends Indexer { + /** + * Create a indexer + * @constructor + * @param {Object} options + */ + + constructor(options) { + super('addr', options); + + this.db = bdb.create(this.options); + } + + /** + * Index transactions by address. + * @private + * @param {ChainEntry} entry + * @param {Block} block + * @param {CoinView} view + */ + + async indexBlock(entry, block, view) { + const b = this.db.batch(); + + for (let i = 0; i < block.txs.length; i++) { + const tx = block.txs[i]; + const hash = tx.hash(); + for (const addr of tx.getHashes(view)) + b.put(layout.T.encode(addr, hash), null); + + if (!tx.isCoinbase()) { + for (const {prevout} of tx.inputs) { + const {hash, index} = prevout; + const coin = view.getOutput(prevout); + assert(coin); + + const addr = coin.getHash(); + + if (!addr) + continue; + + b.del(layout.C.encode(addr, hash, index)); + } + } + + for (let i = 0; i < tx.outputs.length; i++) { + const output = tx.outputs[i]; + const addr = output.getHash(); + + if (!addr) + continue; + + b.put(layout.C.encode(addr, hash, i), null); + } + } + + return b.write(); + } + + /** + * Remove addresses from index. 
+ * @private + * @param {ChainEntry} entry + * @param {Block} block + * @param {CoinView} view + */ + + async unindexBlock(entry, block, view) { + const b = this.db.batch(); + for (let i = 0; i < block.txs.length; i++) { + const tx = block.txs[i]; + const hash = tx.hash(); + for (const addr of tx.getHashes(view)) + b.del(layout.T.encode(addr, hash)); + + if (!tx.isCoinbase()) { + for (const {prevout} of tx.inputs) { + const {hash, index} = prevout; + const coin = view.getOutput(prevout); + assert(coin); + + const addr = coin.getHash(); + + if (!addr) + continue; + + b.put(layout.C.encode(addr, hash, index), null); + } + } + + for (let i = 0; i < tx.outputs.length; i++) { + const output = tx.outputs[i]; + const addr = output.getHash(); + + if (!addr) + continue; + + b.del(layout.C.encode(addr, hash, i)); + } + } + + return b.write(); + } + + /** + * Get all coins pertinent to an address. + * @param {Address[]} addrs + * @returns {Promise} - Returns {@link Coin}[]. + */ + + async getCoinsByAddress(addrs) { + if (!Array.isArray(addrs)) + addrs = [addrs]; + + const coins = []; + + for (const addr of addrs) { + const hash = Address.getHash(addr); + + const keys = await this.db.keys({ + gte: layout.C.min(hash), + lte: layout.C.max(hash), + parse: (key) => { + const [, txid, index] = layout.C.decode(key); + return [txid, index]; + } + }); + + for (const [hash, index] of keys) { + const coin = await this.client.getCoin(hash, index); + assert(coin); + coins.push(coin); + } + } + + return coins; + } + + /** + * Get all transaction hashes to an address. + * @param {Address[]} addrs + * @returns {Promise} - Returns {@link Hash}[]. 
+ */ + + async getHashesByAddress(addrs) { + const set = new BufferSet(); + + for (const addr of addrs) { + const hash = Address.getHash(addr); + + await this.db.keys({ + gte: layout.T.min(hash), + lte: layout.T.max(hash), + parse: (key) => { + const [, txid] = layout.T.decode(key); + set.add(txid); + } + }); + } + + return set.toArray(); + } +} + +module.exports = AddrIndexer; diff --git a/lib/indexer/chainclient.js b/lib/indexer/chainclient.js new file mode 100644 index 000000000..cd86d25bf --- /dev/null +++ b/lib/indexer/chainclient.js @@ -0,0 +1,200 @@ +/*! + * chainclient.js - chain client for bcoin + * Copyright (c) 2018, the bcoin developers (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('assert'); +const AsyncEmitter = require('bevent'); +const Chain = require('../blockchain/chain'); + +/** + * Chain Client + * @extends AsyncEmitter + * @alias module:indexer.ChainClient + */ + +class ChainClient extends AsyncEmitter { + /** + * Create a chain client. + * @constructor + * @param {Chain} chain + */ + + constructor(chain) { + super(); + + assert(chain instanceof Chain); + + this.chain = chain; + this.network = chain.network; + this.opened = false; + + this.init(); + } + + /** + * Initialize the client. + */ + + init() { + this.chain.on('connect', async (entry, block, view) => { + if (!this.opened) + return; + + await this.emitAsync('block connect', entry, block, view); + }); + + this.chain.on('disconnect', async (entry, block, view) => { + if (!this.opened) + return; + + await this.emitAsync('block disconnect', entry, block, view); + }); + + this.chain.on('reset', async (tip) => { + if (!this.opened) + return; + + await this.emitAsync('chain reset', tip); + }); + } + + /** + * Open the client. + * @returns {Promise} + */ + + async open(options) { + assert(!this.opened, 'ChainClient is already open.'); + this.opened = true; + setImmediate(() => this.emit('connect')); + } + + /** + * Close the client. 
+ * @returns {Promise} + */ + + async close() { + assert(this.opened, 'ChainClient is not open.'); + this.opened = false; + setImmediate(() => this.emit('disconnect')); + } + + /** + * Get chain tip. + * @returns {Promise} + */ + + async getTip() { + return this.chain.tip; + } + + /** + * Get chain entry. + * @param {Hash} hash + * @returns {Promise} - Returns {@link ChainEntry}. + */ + + async getEntry(hash) { + const entry = await this.chain.getEntry(hash); + + if (!entry) + return null; + + if (!await this.chain.isMainChain(entry)) + return null; + + return entry; + } + + /** + * Get a coin (unspents only). + * @param {Hash} hash + * @param {Number} index + * @returns {Promise} - Returns {@link Coin}. + */ + + async getCoin(hash, index) { + return this.chain.getCoin(hash, index); + } + + /** + * Get hash range. + * @param {Number} start + * @param {Number} end + * @returns {Promise} + */ + + async getHashes(start = -1, end = -1) { + return this.chain.getHashes(start, end); + } + + /** + * Get block + * @param {Hash} hash + * @returns {Promise} - Returns {@link Block} + */ + + async getBlock(hash) { + const block = await this.chain.getBlock(hash); + + if (!block) + return null; + + return block; + } + + /** + * Get a historical block coin viewpoint. + * @param {Block} hash + * @returns {Promise} - Returns {@link CoinView}. + */ + + async getBlockView(block) { + return this.chain.getBlockView(block); + } + + /** + * Get coin viewpoint. + * @param {TX} tx + * @returns {Promise} - Returns {@link CoinView}. + */ + + async getCoinView(tx) { + return this.chain.getCoinView(tx); + } + + /** + * Rescan for any missed blocks. + * @param {Number} start - Start block. 
+ * @returns {Promise} + */ + + async rescan(start) { + for (let i = start; ; i++) { + const entry = await this.getEntry(i); + if (!entry) { + await this.emitAsync('chain tip'); + break; + }; + + const block = await this.getBlock(entry.hash); + assert(block); + + const view = await this.getBlockView(block); + assert(view); + + await this.emitAsync('block rescan', entry, block, view); + } + }; +} + +/* + * Expose + */ + +module.exports = ChainClient; diff --git a/lib/indexer/index.js b/lib/indexer/index.js new file mode 100644 index 000000000..96ad09a3f --- /dev/null +++ b/lib/indexer/index.js @@ -0,0 +1,16 @@ +/*! + * index.js - indexer for bcoin + * Copyright (c) 2018, the bcoin developers (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +/** + * @module indexer + */ + +exports.Indexer = require('./indexer'); +exports.TXIndexer = require('./txindexer'); +exports.AddrIndexer = require('./addrindexer'); +exports.ChainClient = require('./chainclient'); diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js new file mode 100644 index 000000000..b894d2253 --- /dev/null +++ b/lib/indexer/indexer.js @@ -0,0 +1,812 @@ +/*! + * indexer.js - storage for indexes + * Copyright (c) 2018, the bcoin developers (MIT License). 
+ * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('assert'); +const path = require('path'); +const fs = require('bfile'); +const EventEmitter = require('events'); +const {Lock} = require('bmutex'); +const Logger = require('blgr'); +const Network = require('../protocol/network'); +const layout = require('./layout'); +const records = require('./records'); +const ChainClient = require('./chainclient'); +const NullClient = require('./nullclient'); + +const { + ChainState, + BlockMeta +} = records; + +/** + * Indexer + * @alias module:indexer.Indexer + * @extends EventEmitter + * @property {IndexerDB} db + * @property {Number} height + * @property {ChainState} state + * @emits Indexer#chain tip + */ + +class Indexer extends EventEmitter { + /** + * Create a index db. + * @constructor + * @param {String} module + * @param {Object} options + */ + + constructor(module, options) { + super(); + + assert(typeof module === 'string'); + assert(module.length > 0); + + this.options = new IndexOptions(module, options); + + this.network = this.options.network; + this.logger = this.options.logger.context(`${module}indexer`); + this.client = this.options.client || new NullClient(this); + this.db = null; + this.rescanning = false; + + this.state = new ChainState(); + this.height = 0; + + this.lock = new Lock(); + + this.init(); + } + + /** + * Initialize indexdb. + * @private + */ + + init() { + this._bind(); + } + + /** + * Bind to chain events. 
+ * @private + */ + + _bind() { + this.client.on('error', (err) => { + this.emit('error', err); + }); + + this.client.on('connect', async () => { + try { + await this.syncNode(); + } catch (e) { + this.emit('error', e); + } + }); + + this.client.on('block connect', async (entry, block, view) => { + if (this.rescanning) + return; + try { + await this.addBlock(entry, block, view); + } catch (e) { + this.emit('error', e); + } + }); + + this.client.on('block disconnect', async (entry, block, view) => { + if (this.rescanning) + return; + try { + await this.removeBlock(entry, block, view); + } catch (e) { + this.emit('error', e); + } + }); + + this.client.on('block rescan', async (entry, block, view) => { + try { + await this.rescanBlock(entry, block, view); + } catch (e) { + this.emit('error', e); + } + }); + + this.client.on('chain reset', async (tip) => { + try { + await this.resetChain(tip); + } catch (e) { + this.emit('error', e); + } + }); + + this.client.on('chain tip', async () => { + this.logger.debug('Indexer: finished rescan'); + const tip = await this.getTip(); + this.emit('chain tip', tip); + }); + } + + /** + * Ensure prefix directory (prefix/index). + * @returns {Promise} + */ + + async ensure() { + if (fs.unsupported) + return undefined; + + if (this.options.memory) + return undefined; + + return fs.mkdirp(this.options.prefix); + } + + /** + * Open the indexdb, wait for the database to load. + * @returns {Promise} + */ + + async open() { + await this.ensure(); + await this.db.open(); + await this.db.verify(layout.V.encode(), 'index', 0); + + await this.verifyNetwork(); + + await this.connect(); + } + + /** + * Verify network. 
+ * @returns {Promise} + */ + + async verifyNetwork() { + const raw = await this.db.get(layout.O.encode()); + + if (!raw) { + const b = this.db.batch(); + b.put(layout.O.encode(), fromU32(this.network.magic)); + return b.write(); + } + + const magic = raw.readUInt32LE(0, true); + + if (magic !== this.network.magic) + throw new Error('Network mismatch for Indexer.'); + + return undefined; + } + + /** + * Close the indexdb, wait for the database to close. + * @returns {Promise} + */ + + async close() { + await this.disconnect(); + return this.db.close(); + } + + /** + * Connect to the chain server (client required). + * @returns {Promise} + */ + + async connect() { + return this.client.open(); + } + + /** + * Disconnect from chain server (client required). + * @returns {Promise} + */ + + async disconnect() { + return this.client.close(); + } + + /** + * Sync state with server on every connect. + * @returns {Promise} + */ + + async syncNode() { + const unlock = await this.lock.lock(); + try { + this.logger.info('Resyncing from server...'); + await this.syncState(); + await this.syncChain(); + } finally { + unlock(); + } + } + + /** + * Initialize and write initial sync state. 
+ * @returns {Promise} + */ + + async syncState() { + const cache = await this.getState(); + + if (cache) { + this.state = cache; + this.height = cache.height; + + this.logger.info( + 'Indexer loaded (height=%d, start=%d).', + this.state.height, + this.state.startHeight); + return undefined; + } + + this.logger.info('Initializing database state from server.'); + + const b = this.db.batch(); + const hashes = await this.client.getHashes(); + + let tip = null; + + for (let height = 0; height < hashes.length; height++) { + const hash = hashes[height]; + const meta = new BlockMeta(hash, height); + b.put(layout.h.encode(height), meta.toHash()); + tip = meta; + } + + assert(tip); + + const state = this.state.clone(); + state.startHeight = 0; + state.height = tip.height; + + b.put(layout.R.encode(), state.toRaw()); + + await b.write(); + + this.state = state; + this.height = state.height; + + return undefined; + } + + /** + * Connect and sync with the chain server. + * @private + * @returns {Promise} + */ + + async syncChain() { + let height = this.state.height; + + this.logger.info('Syncing state from height %d.', height); + + // re-org when we're offline might + // leave chain in different state. + // scan chain backwards until we + // find a known 'good' height + for (;;) { + const tip = await this.getBlock(height); + assert(tip); + + if (await this.client.getEntry(tip.hash)) + break; + + assert(height !== 0); + height -= 1; + } + + // start scan from last indexed OR + // last known 'good' height whichever + // is lower, because `scan` scans from + // low to high blocks + if (this.state.startHeight < height) + height = this.state.startHeight; + + this.logger.spam('Starting block rescan from: %d.', height); + return this.scan(height); + } + + /** + * Rescan a block. 
+ * @private + * @param {ChainEntry} entry + * @param {TX[]} txs + * @returns {Promise} + */ + + async rescanBlock(entry, block, view) { + this.logger.spam('Rescanning block: %d.', entry.height); + + if (!this.rescanning) { + this.logger.warning('Unsolicited rescan block: %d.', entry.height); + return; + } + + if (entry.height % 1000 === 0) + this.logger.debug('rescanned block: %d.', entry.height); + + if (entry.height > this.state.height + 1) { + this.logger.warning('Rescan block too high: %d.', entry.height); + return; + } + + try { + await this._addBlock(entry, block, view); + } catch (e) { + this.emit('error', e); + throw e; + } + } + + /** + * Rescan blockchain from a given height. + * @private + * @param {Number?} height + * @returns {Promise} + */ + + async scan(height) { + assert((height >>> 0) === height, 'Indexer: Must pass in a height.'); + + await this.rollback(height); + + const tip = this.state.height; + + this.logger.info( + 'Indexer is scanning %d blocks.', + tip - height + 1); + + try { + this.rescanning = true; + this.logger.debug('rescanning from %d to %d', height, tip); + await this.client.rescan(height); + } finally { + this.rescanning = false; + } + } + + /** + * Force a rescan. + * @param {Number} height + * @returns {Promise} + */ + + async rescan(height) { + const unlock = await this.lock.lock(); + try { + return await this._rescan(height); + } finally { + unlock(); + } + } + + /** + * Force a rescan (without a lock). + * @private + * @param {Number} height + * @returns {Promise} + */ + + async _rescan(height) { + return this.scan(height); + } + + /** + * Get the best block hash. + * @returns {Promise} + */ + + async getState() { + const data = await this.db.get(layout.R.encode()); + + if (!data) + return null; + + return ChainState.fromRaw(data); + } + + /** + * Sync the current chain state to tip. 
+ * @param {BlockMeta} tip + * @returns {Promise} + */ + + async setTip(tip) { + const b = this.db.batch(); + const state = this.state.clone(); + + if (tip.height < state.height) { + // Hashes ahead of our new tip + // that we need to delete. + while (state.height !== tip.height) { + b.del(layout.h.encode(state.height)); + state.height -= 1; + } + } else if (tip.height > state.height) { + assert(tip.height === state.height + 1, 'Bad chain sync.'); + state.height += 1; + } + + state.startHeight = tip.height; + + // Save tip and state. + b.put(layout.h.encode(tip.height), tip.toHash()); + b.put(layout.R.encode(), state.toRaw()); + + await b.write(); + + this.state = state; + this.height = state.height; + } + + /** + * Get a index block meta. + * @param {Hash} hash + * @returns {Promise} + */ + + async getBlock(height) { + const data = await this.db.get(layout.h.encode(height)); + + if (!data) + return null; + + const block = new BlockMeta(); + block.hash = data; + block.height = height; + + return block; + } + + /** + * Get index tip. + * @param {Hash} hash + * @returns {Promise} + */ + + async getTip() { + const tip = await this.getBlock(this.state.height); + + if (!tip) + throw new Error('Indexer: Tip not found!'); + + return tip; + } + + /** + * Sync with chain height. + * @param {Number} height + * @returns {Promise} + */ + + async rollback(height) { + if (height > this.state.height) + throw new Error('Indexer: Cannot rollback to the future.'); + + if (height === this.state.height) { + this.logger.info('Rolled back to same height (%d).', height); + return; + } + + this.logger.info( + 'Rolling back %d Indexer blocks to height %d.', + this.state.height - height, height); + + const tip = await this.getBlock(height); + assert(tip); + + await this.revert(tip.height); + await this.setTip(tip); + } + + /** + * Add a block's transactions and write the new best hash. 
+ * @param {ChainEntry} entry + * @param {Block} block + * @returns {Promise} + */ + + async addBlock(entry, block, view) { + const unlock = await this.lock.lock(); + try { + return await this._addBlock(entry, block, view); + } finally { + unlock(); + } + } + + /** + * Add a block's transactions without a lock. + * @private + * @param {ChainEntry} entry + * @param {Block} block + * @returns {Promise} + */ + + async _addBlock(entry, block, view) { + const tip = BlockMeta.fromEntry(entry); + + if (tip.height >= this.network.block.slowHeight && !this.rescanning) + this.logger.debug('Adding block: %d.', tip.height); + + this.logger.spam('Adding block: %d.', entry.height); + + if (tip.height === this.state.height) { + // We let blocks of the same height + // through specifically for rescans: + // we always want to rescan the last + // block since the state may have + // updated before the block was fully + // processed (in the case of a crash). + this.logger.warning('Already saw Indexer block (%d).', tip.height); + } else if (tip.height !== this.state.startHeight + 1) { + await this.scan(this.state.height); + return; + } + + this.logger.spam('Indexing block: %d.', entry.height); + + await this.indexBlock(entry, block, view); + + // Sync the state to the new tip. + await this.setTip(tip); + + return; + } + + /** + * Process block indexing + * Indexers will implement this method to process the block for indexing + * @param {ChainEntry} entry + * @param {Block} block + * @returns {Promise} + */ + + async indexBlock(entry, block, view) { + ; + } + + /** + * Undo block indexing + * Indexers will implement this method to undo indexing for the block + * @param {ChainEntry} entry + * @param {Block} block + * @returns {Promise} + */ + + async unindexBlock(entry, block, view) { + ; + } + + /** + * Revert db to an older state. 
+ * @param {Number} target + * @returns {Promise} + */ + + async revert(target) { + ; + } + + /** + * Unconfirm a block's transactions + * and write the new best hash (SPV version). + * @param {ChainEntry} entry + * @returns {Promise} + */ + + async removeBlock(entry, block, view) { + const unlock = await this.lock.lock(); + try { + return await this._removeBlock(entry, block, view); + } finally { + unlock(); + } + } + + /** + * Unconfirm a block's transactions. + * @private + * @param {ChainEntry} entry + * @returns {Promise} + */ + + async _removeBlock(entry, block, view) { + const tip = BlockMeta.fromEntry(entry); + + this.logger.spam('Removing block: %d.', entry.height); + + if (tip.height === 0) + throw new Error('Indexer: Bad disconnection (genesis block).'); + + if (tip.height > this.state.height) { + this.logger.warning( + 'Indexer is disconnecting high blocks (%d).', + tip.height); + return; + } + + if (tip.height !== this.state.height) + throw new Error('Indexer: Bad disconnection (height mismatch).'); + + this.logger.spam('Unindexing block: %d.', entry.height); + + await this.unindexBlock(entry, block, view); + + const prev = await this.getBlock(tip.height - 1); + assert(prev); + + // Sync the state to the previous tip. + await this.setTip(prev); + + return; + } + + /** + * Handle a chain reset. + * @param {ChainEntry} entry + * @returns {Promise} + */ + + async resetChain(entry) { + const unlock = await this.lock.lock(); + try { + return await this._resetChain(entry); + } finally { + unlock(); + } + } + + /** + * Handle a chain reset without a lock. + * @private + * @param {ChainEntry} entry + * @returns {Promise} + */ + + async _resetChain(entry) { + if (entry.height > this.state.height) + throw new Error('Indexer: Bad reset height.'); + + return this.rollback(entry.height); + } +} + +/** + * Index Options + * @alias module:indexer.IndexOptions + */ + +class IndexOptions { + /** + * Create index options. 
+ * @constructor + * @param {String} module + * @param {Object} options + */ + + constructor(module, options) { + this.module = module; + this.network = Network.primary; + this.logger = Logger.global; + this.client = null; + this.chain = null; + this.indexers = null; + + this.prefix = null; + this.location = null; + this.memory = true; + this.maxFiles = 64; + this.cacheSize = 16 << 20; + this.compression = true; + + if (options) + this.fromOptions(options); + } + + /** + * Inject properties from object. + * @private + * @param {Object} options + * @returns {IndexOptions} + */ + + fromOptions(options) { + if (options.network != null) + this.network = Network.get(options.network); + + if (options.logger != null) { + assert(typeof options.logger === 'object'); + this.logger = options.logger; + } + + if (options.client != null) { + assert(typeof options.client === 'object'); + this.client = options.client; + } + + if (options.chain != null) { + assert(typeof options.chain === 'object'); + this.client = new ChainClient(options.chain); + } + + if (!this.client) { + throw new Error('Client is required'); + } + + if (options.prefix != null) { + assert(typeof options.prefix === 'string'); + this.prefix = options.prefix; + this.prefix = path.join(this.prefix, 'index'); + this.location = path.join(this.prefix, this.module); + } + + if (options.location != null) { + assert(typeof options.location === 'string'); + this.location = options.location; + } + + if (options.memory != null) { + assert(typeof options.memory === 'boolean'); + this.memory = options.memory; + } + + if (options.maxFiles != null) { + assert((options.maxFiles >>> 0) === options.maxFiles); + this.maxFiles = options.maxFiles; + } + + if (options.cacheSize != null) { + assert(Number.isSafeInteger(options.cacheSize) && options.cacheSize >= 0); + this.cacheSize = options.cacheSize; + } + + if (options.compression != null) { + assert(typeof options.compression === 'boolean'); + this.compression = 
options.compression; + } + + return this; + } + + /** + * Instantiate chain options from object. + * @param {Object} options + * @returns {IndexOptions} + */ + + static fromOptions(options) { + return new this().fromOptions(options); + } +} + +/* + * Helpers + */ + +/** + * fromU32 + * read a 4 byte Uint32LE + * @param {Number} num number + * @returns {Buffer} buffer + */ +function fromU32(num) { + const data = Buffer.allocUnsafe(4); + data.writeUInt32LE(num, 0, true); + return data; +} + +/* + * Expose + */ + +module.exports = Indexer; diff --git a/lib/indexer/layout.js b/lib/indexer/layout.js new file mode 100644 index 000000000..e2bc243c3 --- /dev/null +++ b/lib/indexer/layout.js @@ -0,0 +1,31 @@ +/*! + * layout.js - indexer layout for bcoin + * Copyright (c) 2018, the bcoin developers (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const bdb = require('bdb'); + +/* + * Index Database Layout: + * To be extended by indexer implementations + * V -> db version + * O -> flags + * h[height] -> recent block hash + * R -> chain sync state + */ + +const layout = { + V: bdb.key('V'), + O: bdb.key('O'), + h: bdb.key('h', ['uint32']), + R: bdb.key('R') +}; + +/* + * Expose + */ + +module.exports = layout; diff --git a/lib/indexer/nullclient.js b/lib/indexer/nullclient.js new file mode 100644 index 000000000..d71dba5ef --- /dev/null +++ b/lib/indexer/nullclient.js @@ -0,0 +1,142 @@ +/*! + * nullclient.js - chain client for bcoin + * Copyright (c) 2018, the bcoin developers (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('assert'); +const EventEmitter = require('events'); + +/** + * Null Client + * Sort of a fake local client for separation of concerns. + * @alias module:indexer.NullClient + */ + +class NullClient extends EventEmitter { + /** + * Create a client. 
+ * @constructor + * @param {Chain} chain + */ + + constructor(chain) { + super(); + + this.chain = chain; + this.network = chain.network; + this.opened = false; + } + + /** + * Open the client. + * @returns {Promise} + */ + + async open(options) { + assert(!this.opened, 'NullClient is already open.'); + this.opened = true; + setImmediate(() => this.emit('connect')); + } + + /** + * Close the client. + * @returns {Promise} + */ + + async close() { + assert(this.opened, 'NullClient is not open.'); + this.opened = false; + setImmediate(() => this.emit('disconnect')); + } + + /** + * Get chain tip. + * @returns {Promise} + */ + + async getTip() { + const {hash, height, time} = this.network.genesis; + return { hash, height, time }; + } + + /** + * Get chain entry. + * @param {Hash} hash + * @returns {Promise} - Returns {@link ChainEntry}. + */ + + async getEntry(hash) { + return { hash, height: 0, time: 0 }; + } + + /** + * Get a coin (unspents only). + * @param {Hash} hash + * @param {Number} index + * @returns {Promise} - Returns {@link Coin}. + */ + + async getCoin(hash, index) { + return null; + } + + /** + * Get hash range. + * @param {Number} start + * @param {Number} end + * @returns {Promise} + */ + + async getHashes(start = -1, end = -1) { + return [this.network.genesis.hash]; + } + + /** + * Get block + * @param {Hash} hash + * @returns {Promise} + */ + + async getBlock(hash) { + return null; + } + + /** + * Get a historical block coin viewpoint. + * @param {Block} hash + * @returns {Promise} - Returns {@link CoinView}. + */ + + async getBlockView(block) { + return null; + } + + /** + * Get coin viewpoint. + * @param {TX} tx + * @returns {Promise} - Returns {@link CoinView}. + */ + + async getCoinView(tx) { + return null; + } + + /** + * Rescan for any missed blocks. + * @param {Number} start - Start block. 
+ * @returns {Promise} + */ + + async rescan(start) { + ; + } +} + +/* + * Expose + */ + +module.exports = NullClient; diff --git a/lib/indexer/records.js b/lib/indexer/records.js new file mode 100644 index 000000000..3e67e8436 --- /dev/null +++ b/lib/indexer/records.js @@ -0,0 +1,221 @@ +/*! + * records.js - indexer records + * Copyright (c) 2018, the bcoin developers (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +/** + * @module lib/records + */ + +const bio = require('bufio'); +const util = require('../utils/util'); +const consensus = require('../protocol/consensus'); + +/** + * Chain State + * @alias module:indexer.ChainState + */ + +class ChainState { + /** + * Create a chain state. + * @constructor + */ + + constructor() { + this.startHeight = 0; + this.height = 0; + } + + /** + * Clone the state. + * @returns {ChainState} + */ + + clone() { + const state = new ChainState(); + state.startHeight = this.startHeight; + state.height = this.height; + return state; + } + + /** + * Inject properties from serialized data. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + + this.startHeight = br.readU32(); + this.height = br.readU32(); + + return this; + } + + /** + * Instantiate chain state from serialized data. + * @param {Buffer} data + * @returns {ChainState} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } + + /** + * Serialize the chain state. + * @returns {Buffer} + */ + + toRaw() { + const bw = bio.write(8); + + bw.writeU32(this.startHeight); + bw.writeU32(this.height); + + return bw.render(); + } +} + +/** + * Block Meta + * @alias module:indexer.BlockMeta + */ + +class BlockMeta { + /** + * Create block meta. + * @constructor + * @param {Hash} hash + * @param {Number} height + */ + + constructor(hash, height) { + this.hash = hash || consensus.NULL_HASH; + this.height = height != null ? height : -1; + } + + /** + * Clone the block. 
+ * @returns {BlockMeta} + */ + + clone() { + return new this.constructor(this.hash, this.height); + } + + /** + * Get block meta hash as a buffer. + * @returns {Buffer} + */ + + toHash() { + return Buffer.from(this.hash, 'hex'); + } + + /** + * Instantiate block meta from chain entry. + * @private + * @param {IndexEntry} entry + */ + + fromEntry(entry) { + this.hash = entry.hash; + this.height = entry.height; + return this; + } + + /** + * Instantiate block meta from json object. + * @private + * @param {Object} json + */ + + fromJSON(json) { + this.hash = util.revHex(json.hash); + this.height = json.height; + return this; + } + + /** + * Instantiate block meta from serialized tip data. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + this.hash = br.readHash('hex'); + this.height = br.readI32(); + return this; + } + + /** + * Instantiate block meta from chain entry. + * @param {IndexEntry} entry + * @returns {BlockMeta} + */ + + static fromEntry(entry) { + return new this().fromEntry(entry); + } + + /** + * Instantiate block meta from json object. + * @param {Object} json + * @returns {BlockMeta} + */ + + static fromJSON(json) { + return new this().fromJSON(json); + } + + /** + * Instantiate block meta from serialized data. + * @param {Hash} hash + * @param {Buffer} data + * @returns {BlockMeta} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } + + /** + * Serialize the block meta. + * @returns {Buffer} + */ + + toRaw() { + const bw = bio.write(36); + bw.writeHash(this.hash); + bw.writeI32(this.height); + return bw.render(); + } + + /** + * Convert the block meta to a more json-friendly object. 
+ * @returns {Object} + */ + + toJSON() { + return { + hash: util.revHex(this.hash), + height: this.height + }; + } +} + +/* + * Expose + */ + +exports.ChainState = ChainState; +exports.BlockMeta = BlockMeta; + +module.exports = exports; diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js new file mode 100644 index 000000000..ad953b63b --- /dev/null +++ b/lib/indexer/txindexer.js @@ -0,0 +1,151 @@ +/*! + * txindexer.js - tx indexer + * Copyright (c) 2018, the bcoin developers (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const bdb = require('bdb'); +const layout = require('./layout'); +const TXMeta = require('../primitives/txmeta'); +const Indexer = require('./indexer'); + +/* + * TXIndexer Database Layout: + * t[hash] -> extended tx +*/ + +Object.assign(layout, { + t: bdb.key('t', ['hash256']) +}); + +/** + * TXIndexer + * @alias module:indexer.TXIndexer + * @extends Indexer + */ + +class TXIndexer extends Indexer { + /** + * Create a indexer + * @constructor + * @param {Object} options + */ + + constructor(options) { + super('tx', options); + + this.db = bdb.create(this.options); + } + + /** + * Index transactions by txid. + * @private + * @param {ChainEntry} entry + * @param {Block} block + * @param {CoinView} view + */ + + async indexBlock(entry, block, view) { + const b = this.db.batch(); + + for (let i = 0; i < block.txs.length; i++) { + const tx = block.txs[i]; + const hash = tx.hash(); + const meta = TXMeta.fromTX(tx, entry, i); + b.put(layout.t.encode(hash), meta.toRaw()); + } + + return b.write(); + } + + /** + * Remove transactions from index. 
+ * @private + * @param {ChainEntry} entry + * @param {Block} block + * @param {CoinView} view + */ + + async unindexBlock(entry, block, view) { + const b = this.db.batch(); + + for (let i = 0; i < block.txs.length; i++) { + const tx = block.txs[i]; + const hash = tx.hash(); + b.del(layout.t.encode(hash)); + } + + return b.write(); + } + + /** + * Get a transaction with metadata. + * @param {Hash} hash + * @returns {Promise} - Returns {@link TXMeta}. + */ + + async getMeta(hash) { + const data = await this.db.get(layout.t.encode(hash)); + + if (!data) + return null; + + return TXMeta.fromRaw(data); + } + + /** + * Retrieve a transaction. + * @param {Hash} hash + * @returns {Promise} - Returns {@link TX}. + */ + + async getTX(hash) { + const meta = await this.getMeta(hash); + + if (!meta) + return null; + + return meta.tx; + } + + /** + * @param {Hash} hash + * @returns {Promise} - Returns Boolean. + */ + + async hasTX(hash) { + return this.db.has(layout.t.encode(hash)); + } + + /** + * Get coin viewpoint (historical). + * @param {TX} tx + * @returns {Promise} - Returns {@link CoinView}. 
+ */ + + async getSpentView(tx) { + const view = await this.client.getCoinView(tx); + + for (const {prevout} of tx.inputs) { + if (view.hasEntry(prevout)) + continue; + + const {hash, index} = prevout; + const meta = await this.getMeta(hash); + + if (!meta) + continue; + + const {tx, height} = meta; + + if (index < tx.outputs.length) + view.addIndex(tx, index, height); + } + + return view; + } +} + +module.exports = TXIndexer; diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 99e1ad9b5..6b7c57532 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -17,6 +17,8 @@ const Node = require('./node'); const HTTP = require('./http'); const RPC = require('./rpc'); const blockstore = require('../blockstore'); +const TXIndexer = require('../indexer/txindexer'); +const AddrIndexer = require('../indexer/addrindexer'); /** * Full Node @@ -154,6 +156,27 @@ class FullNode extends Node { cors: this.config.bool('cors') }); + // Indexers + this.txindex = null; + if (this.config.bool('index-tx')) + this.txindex = new TXIndexer({ + network: this.network, + logger: this.logger, + chain: this.chain, + memory: this.config.bool('memory'), + prefix: this.config.filter('index').str('prefix') || this.config.prefix + }); + + this.addrindex = null; + if (this.config.bool('index-address')) + this.addrindex= new AddrIndexer({ + network: this.network, + logger: this.logger, + chain: this.chain, + memory: this.config.bool('memory'), + prefix: this.config.filter('index').str('prefix') || this.config.prefix + }); + this.init(); } @@ -169,6 +192,12 @@ class FullNode extends Node { this.pool.on('error', err => this.error(err)); this.miner.on('error', err => this.error(err)); + if (this.txindex) + this.txindex.on('error', err => this.error(err)); + + if (this.addrindex) + this.addrindex.on('error', err => this.error(err)); + if (this.http) this.http.on('error', err => this.error(err)); @@ -235,6 +264,12 @@ class FullNode extends Node { await this.miner.open(); await 
this.pool.open(); + if (this.txindex) + await this.txindex.open(); + + if (this.addrindex) + await this.addrindex.open(); + await this.openPlugins(); await this.http.open(); @@ -256,6 +291,12 @@ class FullNode extends Node { await this.handlePreclose(); await this.http.close(); + if (this.txindex) + await this.txindex.close(); + + if (this.addrindex) + await this.addrindex.close(); + await this.closePlugins(); await this.pool.close(); @@ -417,10 +458,14 @@ class FullNode extends Node { async getCoinsByAddress(addrs) { const mempool = this.mempool.getCoinsByAddress(addrs); - const chain = await this.chain.getCoinsByAddress(addrs); + + if (!this.addrindex) + return mempool; + + const index = await this.addrindex.getCoinsByAddress(addrs); const out = []; - for (const coin of chain) { + for (const coin of index) { const spent = this.mempool.isSpent(coin.hash, coin.index); if (spent) @@ -444,8 +489,23 @@ class FullNode extends Node { async getMetaByAddress(addrs) { const mempool = this.mempool.getMetaByAddress(addrs); - const chain = await this.chain.getMetaByAddress(addrs); - return chain.concat(mempool); + + if (this.txindex && this.addrindex) { + if (!Array.isArray(addrs)) + addrs = [addrs]; + + const hashes = await this.addrindex.getHashesByAddress(addrs); + const mtxs = []; + + for (const hash of hashes) { + const mtx = await this.txindex.getMeta(hash); + assert(mtx); + mtxs.push(mtx); + } + return mtxs.concat(mempool); + } + + return mempool; } /** @@ -460,7 +520,10 @@ class FullNode extends Node { if (meta) return meta; - return this.chain.getMeta(hash); + if (this.txindex) + return this.txindex.getMeta(hash); + + return null; } /** @@ -472,7 +535,11 @@ class FullNode extends Node { async getMetaView(meta) { if (meta.height === -1) return this.mempool.getSpentView(meta.tx); - return this.chain.getSpentView(meta.tx); + + if (this.txindex) + return this.txindex.getSpentView(meta.tx); + + return null; } /** @@ -517,7 +584,10 @@ class FullNode extends Node { if 
(this.mempool.hasEntry(hash)) return true; - return this.chain.hasTX(hash); + if (this.txindex) + return this.txindex.hasTX(hash); + + return false; } } diff --git a/lib/node/node.js b/lib/node/node.js index b407020e4..f3d0b7ece 100644 --- a/lib/node/node.js +++ b/lib/node/node.js @@ -64,6 +64,8 @@ class Node extends EventEmitter { this.pool = null; this.miner = null; this.http = null; + this.txindex = null; + this.addrindex = null; this._init(file); } diff --git a/lib/node/rpc.js b/lib/node/rpc.js index d9c08827b..90082f11b 100644 --- a/lib/node/rpc.js +++ b/lib/node/rpc.js @@ -963,8 +963,8 @@ class RPC extends RPCBase { if (hash) { block = await this.chain.getBlock(hash); - } else if (this.chain.options.indexTX) { - const tx = await this.chain.getMeta(last); + } else if (await this.node.hasTX(last)) { + const tx = await this.node.getMeta(last); if (tx) block = await this.chain.getBlock(tx.block); } else { diff --git a/test/indexer-test.js b/test/indexer-test.js new file mode 100644 index 000000000..a6b4ee935 --- /dev/null +++ b/test/indexer-test.js @@ -0,0 +1,129 @@ +/* eslint-env mocha */ +/* eslint prefer-arrow-callback: "off" */ + +'use strict'; + +const assert = require('./util/assert'); +const reorg = require('./util/reorg'); +const Chain = require('../lib/blockchain/chain'); +const WorkerPool = require('../lib/workers/workerpool'); +const Miner = require('../lib/mining/miner'); +const MemWallet = require('./util/memwallet'); +const TXIndexer = require('../lib/indexer/txindexer'); +const AddrIndexer = require('../lib/indexer/addrindexer'); +const Network = require('../lib/protocol/network'); +const network = Network.get('regtest'); + +const workers = new WorkerPool({ + enabled: true +}); + +const chain = new Chain({ + memory: true, + network, + workers +}); + +const miner = new Miner({ + chain, + version: 4, + workers +}); + +const cpu = miner.cpu; + +const wallet = new MemWallet({ + network +}); + +const txindexer = new TXIndexer({ + 'memory': true, + 
'network': network, + 'chain': chain +}); + +const addrindexer = new AddrIndexer({ + 'memory': true, + 'network': network, + 'chain': chain +}); + +describe('Indexer', function() { + this.timeout(45000); + + it('should open indexer', async () => { + await chain.open(); + await miner.open(); + await txindexer.open(); + await addrindexer.open(); + }); + + it('should index 10 blocks', async () => { + miner.addresses.length = 0; + miner.addAddress(wallet.getReceive()); + for (let i = 0; i < 10; i++) { + const block = await cpu.mineBlock(); + assert(block); + assert(await chain.add(block)); + } + + assert.strictEqual(chain.height, 10); + assert.strictEqual(txindexer.state.startHeight, 10); + assert.strictEqual(addrindexer.state.startHeight, 10); + + const coins = + await addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 10); + + for (const coin of coins) { + const meta = await txindexer.getMeta(coin.hash); + assert.bufferEqual(meta.tx.hash(), coin.hash); + } + }); + + it('should rescan and reindex 10 missed blocks', async () => { + await txindexer.disconnect(); + await addrindexer.disconnect(); + + for (let i = 0; i < 10; i++) { + const block = await cpu.mineBlock(); + assert(block); + assert(await chain.add(block)); + } + + assert.strictEqual(chain.height, 20); + + await txindexer.connect(); + await addrindexer.connect(); + + await new Promise(r => addrindexer.once('chain tip', r)); + + assert.strictEqual(txindexer.state.startHeight, 20); + assert.strictEqual(addrindexer.state.startHeight, 20); + + const coins = + await addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 20); + + for (const coin of coins) { + const meta = await txindexer.getMeta(coin.hash); + assert.bufferEqual(meta.tx.hash(), coin.hash); + } + }); + + it('should handle indexing a reorg', async () => { + await reorg(chain, cpu, 10); + + assert.strictEqual(txindexer.state.startHeight, 31); + 
assert.strictEqual(addrindexer.state.startHeight, 31); + + const coins = + await addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 31); + + for (const coin of coins) { + const meta = await txindexer.getMeta(coin.hash); + assert.bufferEqual(meta.tx.hash(), coin.hash); + } + }); +}); diff --git a/test/node-test.js b/test/node-test.js index 1d7a21bd4..fa017fa13 100644 --- a/test/node-test.js +++ b/test/node-test.js @@ -28,6 +28,8 @@ const node = new FullNode({ network: 'regtest', workers: true, plugins: [require('../lib/wallet/plugin')], + indexTX: true, + indexAddress: true, port: ports.p2p, httpPort: ports.node, env: { @@ -756,6 +758,55 @@ describe('Node', function() { assert.strictEqual(tx1.txid(), tx2.txid()); }); + it('should get tx by hash', async () => { + const block = await mineBlock(); + await chain.add(block); + + const tx = block.txs[0]; + const hash = tx.hash(); + const hasTX = await node.hasTX(hash); + + assert.strictEqual(hasTX, true); + + const tx2 = await node.getTX(hash); + assert.strictEqual(tx.txid(), tx2.txid()); + + const meta = await node.getMeta(hash); + assert.strictEqual(meta.tx.txid(), tx2.txid()); + }); + + it('should get coin/tx by addr', async () => { + const addr = await wallet.receiveAddress(); + const mtx = await wallet.createTX({ + rate: 100000, + outputs: [{ + value: 100000, + address: addr + }] + }); + + await wallet.sign(mtx); + + const tx = mtx.toTX(); + const job = await miner.createJob(); + + job.addTX(tx, mtx.view); + job.refresh(); + + const block = await job.mineAsync(); + await chain.add(block); + + await new Promise(r => setTimeout(r, 300)); + + const txs = await node.getTXByAddress(addr.hash); + const tx2 = txs[0]; + assert.strictEqual(tx.txid(), tx2.txid()); + + const coins = await node.getCoinsByAddress(addr.hash); + const coin = coins[0]; + assert.strictEqual(tx.txid(), coin.txid()); + }); + it('should cleanup', async () => { consensus.COINBASE_MATURITY = 100; await node.close(); diff 
--git a/test/util/reorg.js b/test/util/reorg.js new file mode 100644 index 000000000..bcdf953cb --- /dev/null +++ b/test/util/reorg.js @@ -0,0 +1,63 @@ +'use strict'; + +const assert = require('./assert'); +const Chain = require('../../lib/blockchain/chain'); +const CPUMiner = require('../../lib/mining/cpuminer'); + +/** + * Reorgs the chain to given height using miners. + * @param {Chain} chain chain + * @param {CPUMiner} cpu cpuminer + * @param {Number} height height + * @returns {Promise} null + */ +async function reorg(chain, cpu, height) { + assert(chain instanceof Chain); + assert(cpu instanceof CPUMiner); + assert(typeof height === 'number'); + + let tip1, tip2 = null; + for (let i = 0; i < height; i++) { + const job1 = await cpu.createJob(tip1); + const job2 = await cpu.createJob(tip2); + + const blk1 = await job1.mineAsync(); + const blk2 = await job2.mineAsync(); + + const hash1 = blk1.hash(); + const hash2 = blk2.hash(); + + assert(await chain.add(blk1)); + assert(await chain.add(blk2)); + + assert.bufferEqual(chain.tip.hash, hash1); + + tip1 = await chain.getEntry(hash1); + tip2 = await chain.getEntry(hash2); + + assert(tip1); + assert(tip2); + + assert(!await chain.isMainChain(tip2)); + } + + const entry = await chain.getEntry(tip2.hash); + assert(entry); + assert.strictEqual(chain.height, entry.height); + + const block = await cpu.mineBlock(entry); + assert(block); + + let forked = false; + chain.once('reorganize', () => { + forked = true; + }); + + assert(await chain.add(block)); + + assert(forked); + assert.bufferEqual(chain.tip.hash, block.hash()); + assert(chain.tip.chainwork.gt(tip1.chainwork)); +} + +module.exports = reorg; From 33de39ca0a8718629f5c76192a33a53483216651 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Fri, 22 Mar 2019 16:57:01 -0700 Subject: [PATCH 02/40] migrate: remove txindex and addrindex from chaindb --- migrate/chaindb5to6.js | 79 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 
migrate/chaindb5to6.js diff --git a/migrate/chaindb5to6.js b/migrate/chaindb5to6.js new file mode 100644 index 000000000..29d2642ac --- /dev/null +++ b/migrate/chaindb5to6.js @@ -0,0 +1,79 @@ +'use strict'; + +const assert = require('assert'); +const bdb = require('bdb'); +const layout = require('../lib/blockchain/layout'); + +// changes: +// removes tx, addr indexes i.e layout.t, layout.T, layout.C + +assert(process.argv.length > 2, 'Please pass in a database path.'); + +const db = bdb.create({ + location: process.argv[2], + memory: false, + compression: true, + cacheSize: 32 << 20, + createIfMissing: false +}); + +async function removeKey(name, key) { + const iter = db.iterator({ + gte: key.min(), + lte: key.max(), + reverse: true, + keys: true + }); + + let batch = db.batch(); + let total = 0; + + while (await iter.next()) { + const {key} = iter; + batch.del(key); + + if (++total % 10000 === 0) { + console.log('Cleaned up %d %s index records.', total, name); + await batch.write(); + batch = db.batch(); + } + } + await batch.write(); + + console.log('Cleaned up %d %s index records.', total, name); +} + +/* + * Execute + */ + +(async () => { + await db.open(); + + console.log('Opened %s.', process.argv[2]); + console.log('Checking version.'); + await db.verify(layout.V.build(), 'chain', 5); + + const t = bdb.key('t', ['hash256']); + const T = bdb.key('T', ['hash', 'hash256']); + const C = bdb.key('C', ['hash', 'hash256', 'uint32']); + + await removeKey('hash -> tx', t); + await removeKey('addr -> tx', T); + await removeKey('addr -> coin', C); + + console.log('Compacting database...'); + await db.compactRange(); + + console.log('Updating version to %d.', 6); + await db.del(layout.V.build()); + await db.verify(layout.V.build(), 'chain', 6); + + await db.close(); +})().then(() => { + console.log('Migration complete.'); + process.exit(0); +}).catch((err) => { + console.error(err.stack); + process.exit(1); +}); From 51ac4a720bc44252917bc53dc1ac8c1ee5204e41 Mon Sep 17 
00:00:00 2001 From: Javed Khan Date: Fri, 22 Mar 2019 16:57:56 -0700 Subject: [PATCH 03/40] changelog: add indexer to changelog --- CHANGELOG.md | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6821a4999..7ba6922ae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -112,6 +112,72 @@ for downloading the blocks again. - Updates to dependencies including `bcrypto` to version > 3. - Various small fixes to run bcoin in a browser. +## v1.x.x + +### Migration + +The chain indexing subsystem has been refactored to be more modular and +flexible. + +A migration is required to clean up the old indexes, if present. + +``` bash +$ ./migrate/chaindb5to6.js ~/.bcoin/chain +``` + +**Note**: if enabled, tx and addr indexes will be regenerated by rescanning the +chain on next startup; this process might take a while. Please take the +potential downtime in re-indexing into account before upgrading. + +Indexing has been made extensible so that new indexers such as a filter index +for BIP 157 can be implemented easily. + +Users can toggle any indexing on/off anytime before or after the initial sync. +The indexer will start resyncing the chain state and replaying blocks to +process them for indexing. Once caught up, it will just index new blocks. + +An index can be dropped by just deleting the corresponding database. + +### Notable Changes + +- `__lib/indexer__` `Indexer` implements the base methods which are common to + all indexers, including setting up the database, handling chain events such + as new block etc. + +- By default, bcoin ships `TXIndexer`, `AddrIndexer` implementations. These + indexers preserve all the existing indexing functionality and can be enabled + via the same flags i.e. `--index-tx` `--index-address`, for compatibility. + +- `Indexer` emits a `chain tip` with `[tip]`, where tip is an instance of + `BlockMeta`, when it is caught up with the chain.
 + +- Database location can be configured via `--index-prefix` config option. + Default locations are `prefix` + `/index` e.g.: `~/.bcoin/testnet/index/tx`, + `~/.bcoin/testnet/index/addr`. + +- `__/lib/blockchain/chain__` - `getSpentView` accepts a `TXMeta` instead of `TX` + +- `__/lib/blockchain/chain__` - the following methods have been moved out of + the chain to the indexers. Using the methods on the chain is deprecated: + + `node.txindex` implements: + + + `getMeta(hash)` + + `getTX(hash)` + + `hasTX(hash)` + + `getSpentView(tx)` + + `node.addrindex` implements: + + + `getCoinsByAddress(addrs)` + + `getHashesByAddress(addrs)` + + The following methods require `getHashesByAddress` in conjunction with + `node.txindex.getTX` and `node.txindex.getMeta` respectively. + + + `getTXByAddress(addrs)` + + `getMetaByAddress(addrs)` + ## v1.0.0 ### Migration From 9f89c79bd7c0301d36a54997a31d9924a3647c09 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Wed, 20 Mar 2019 19:49:04 +0530 Subject: [PATCH 04/40] indexer: work with blockstore --- lib/indexer/txindexer.js | 123 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 115 insertions(+), 8 deletions(-) diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index ad953b63b..b09d48a27 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -6,8 +6,12 @@ 'use strict'; +const assert = require('bsert'); const bdb = require('bdb'); +const bio = require('bufio'); const layout = require('./layout'); +const consensus = require('../protocol/consensus'); +const TX = require('../primitives/tx'); const TXMeta = require('../primitives/txmeta'); const Indexer = require('./indexer'); @@ -20,6 +24,83 @@ Object.assign(layout, { t: bdb.key('t', ['hash256']) }); +/** + * Transaction Record + */ + +class TxRecord { + /** + * Create a transaction record.
+ * @constructor + */ + + constructor(options = {}) { + this.block = options.block || consensus.ZERO_HASH; + this.height = options.height || 0; + this.time = options.time || 0; + this.index = options.index || 0; + this.offset = options.offset || 0; + this.length = options.length || 0; + + assert((this.height >>> 0) === this.height); + assert((this.time >>> 0) === this.time); + assert((this.index >>> 0) === this.index); + assert((this.offset >>> 0) === this.offset); + assert((this.length >>> 0) === this.length); + } + + /** + * Inject properties from serialized data. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + + this.block = br.readHash(); + this.height = br.readU32(); + this.time = br.readU32(); + this.index = br.readU32(); + if (this.index === 0x7fffffff) + this.index = -1; + + this.offset = br.readU32(); + this.length = br.readU32(); + + return this; + } + + /** + * Instantiate transaction record from serialized data. + * @param {Hash} hash + * @param {Buffer} data + * @returns {TxRecord} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } + + /** + * Serialize the transaction record.
+ * @returns {Buffer} + */ + + toRaw() { + const bw = bio.write(52); + + bw.writeHash(this.block); + bw.writeU32(this.height); + bw.writeU32(this.time); + bw.writeU32(this.index); + bw.writeU32(this.offset); + bw.writeU32(this.length); + + return bw.render(); + } +} + /** * TXIndexer * @alias module:indexer.TXIndexer @@ -50,11 +131,27 @@ class TXIndexer extends Indexer { async indexBlock(entry, block, view) { const b = this.db.batch(); - for (let i = 0; i < block.txs.length; i++) { - const tx = block.txs[i]; + const data = block.toRaw(); + const br = bio.read(data); + // ignore header + br.readBytes(80); + const count = br.readVarint(); + + for (let i = 0; i < count; i++) { + const offset = br.offset; + const tx = TX.fromReader(br); + const length = br.offset - offset; const hash = tx.hash(); - const meta = TXMeta.fromTX(tx, entry, i); - b.put(layout.t.encode(hash), meta.toRaw()); + + const txrecord = new TxRecord({ + block: entry.hash, + height: entry.height, + time: entry.time, + index: i, + offset: offset, + length: length + }); + b.put(layout.t.encode(hash), txrecord.toRaw()); } return b.write(); @@ -87,12 +184,22 @@ class TXIndexer extends Indexer { */ async getMeta(hash) { - const data = await this.db.get(layout.t.encode(hash)); - - if (!data) + const raw = await this.db.get(layout.t.encode(hash)); + if (!raw) return null; - return TXMeta.fromRaw(data); + const record = TxRecord.fromRaw(raw); + + const data = await this.read(record.block, record.offset, record.length); + const tx = TX.fromRaw(data); + + const meta = TXMeta.fromTX(tx); + meta.height = record.height; + meta.block = record.block; + meta.time = record.time; + meta.index = record.index; + + return meta; } /** From f9aab08c46574be4ff9306a30c2c3589a59e9a45 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 22 Mar 2019 13:30:00 -0700 Subject: [PATCH 05/40] indexer: add block position to tx --- lib/indexer/indexer.js | 2 + lib/indexer/txindexer.js | 17 +++----- lib/primitives/block.js | 13 
+++++- lib/primitives/tx.js | 54 +++++++++++++++++++++---- test/block-test.js | 87 ++++++++++++++++++++++++++++++++++++++++ test/indexer-test.js | 10 ++++- 6 files changed, 161 insertions(+), 22 deletions(-) diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index b894d2253..87356ad8b 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -533,6 +533,8 @@ class Indexer extends EventEmitter { */ async _addBlock(entry, block, view) { + assert(block.hasRaw(), 'Expected raw data for block.'); + const tip = BlockMeta.fromEntry(entry); if (tip.height >= this.network.block.slowHeight && !this.rescanning) diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index b09d48a27..8f68d6b48 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -131,17 +131,11 @@ class TXIndexer extends Indexer { async indexBlock(entry, block, view) { const b = this.db.batch(); - const data = block.toRaw(); - const br = bio.read(data); - // ignore header - br.readBytes(80); - const count = br.readVarint(); - - for (let i = 0; i < count; i++) { - const offset = br.offset; - const tx = TX.fromReader(br); - const length = br.offset - offset; + for (let i = 0; i < block.txs.length; i++) { + const tx = block.txs[i]; + const hash = tx.hash(); + const {offset, size} = tx.getPosition(); const txrecord = new TxRecord({ block: entry.hash, @@ -149,8 +143,9 @@ class TXIndexer extends Indexer { time: entry.time, index: i, offset: offset, - length: length + length: size }); + b.put(layout.t.encode(hash), txrecord.toRaw()); } diff --git a/lib/primitives/block.js b/lib/primitives/block.js index b10b3dc92..f0766f62d 100644 --- a/lib/primitives/block.js +++ b/lib/primitives/block.js @@ -109,6 +109,15 @@ class Block extends AbstractBlock { return this.frame().data; } + /** + * Check if block has been serialized. + * @returns {Buffer} + */ + + hasRaw() { + return Boolean(this._raw); + } + /** * Serialize the block, do not include witnesses. 
* @returns {Buffer} @@ -645,7 +654,7 @@ class Block extends AbstractBlock { let witness = 0; for (let i = 0; i < count; i++) { - const tx = TX.fromReader(br); + const tx = TX.fromReader(br, true); witness += tx._witness; this.txs.push(tx); } @@ -738,7 +747,7 @@ class Block extends AbstractBlock { bw.writeVarint(this.txs.length); for (const tx of this.txs) - tx.toWriter(bw); + tx.toWriter(bw, true); return bw; } diff --git a/lib/primitives/tx.js b/lib/primitives/tx.js index d3ead2446..bfc94b11a 100644 --- a/lib/primitives/tx.js +++ b/lib/primitives/tx.js @@ -57,6 +57,8 @@ class TX { this._whash = null; this._raw = null; + this._offset = -1; + this._block = false; this._size = -1; this._witness = -1; this._sigops = -1; @@ -157,6 +159,8 @@ class TX { this._raw = null; this._size = -1; + this._offset = -1; + this._block = false; this._witness = -1; this._sigops = -1; @@ -245,15 +249,21 @@ class TX { /** * Write the transaction to a buffer writer. * @param {BufferWriter} bw + * @param {Boolean} block */ - toWriter(bw) { + toWriter(bw, block) { if (this.mutable) { if (this.hasWitness()) return this.writeWitness(bw); return this.writeNormal(bw); } + if (block) { + this._offset = bw.offset; + this._block = true; + } + bw.writeBytes(this.toRaw()); return bw; @@ -311,6 +321,21 @@ class TX { return raw; } + /** + * Return the offset and size of the transaction. Useful + * when the transaction is deserialized within a block. + * @returns {Object} Contains `size` and `offset`. + */ + + getPosition() { + assert(this._block && this._offset > 80, 'Position not available.'); + + return { + offset: this._offset, + size: this._size + }; + } + /** * Calculate total size and size of the witness bytes. * @returns {Object} Contains `size` and `witness`. @@ -2226,11 +2251,12 @@ class TX { /** * Instantiate a transaction from a buffer reader. 
* @param {BufferReader} br + * @param {Boolean} block * @returns {TX} */ - static fromReader(br) { - return new this().fromReader(br); + static fromReader(br, block) { + return new this().fromReader(br, block); } /** @@ -2247,13 +2273,14 @@ class TX { * Inject properties from buffer reader. * @private * @param {BufferReader} br + * @param {Boolean} block */ - fromReader(br) { + fromReader(br, block) { if (hasWitnessBytes(br)) - return this.fromWitnessReader(br); + return this.fromWitnessReader(br, block); - br.start(); + const start = br.start(); this.version = br.readU32(); @@ -2269,6 +2296,11 @@ class TX { this.locktime = br.readU32(); + if (block) { + this._offset = start; + this._block = true; + } + if (!this.mutable) { this._raw = br.endData(); this._size = this._raw.length; @@ -2285,10 +2317,11 @@ class TX { * buffer reader (witness serialization). * @private * @param {BufferReader} br + * @param {Boolean} block */ - fromWitnessReader(br) { - br.start(); + fromWitnessReader(br, block) { + const start = br.start(); this.version = br.readU32(); @@ -2336,6 +2369,11 @@ class TX { this.locktime = br.readU32(); + if (block) { + this._offset = start; + this._block = true; + } + if (!this.mutable && hasWitness) { this._raw = br.endData(); this._size = this._raw.length; diff --git a/test/block-test.js b/test/block-test.js index 2ff7848a1..20f2047b2 100644 --- a/test/block-test.js +++ b/test/block-test.js @@ -375,4 +375,91 @@ describe('Block', function() { }); } } + + it('should deserialize with offset positions for txs (witness)', () => { + const [block] = block482683.getBlock(); + + const expected = [ + {offset: 81, size: 217}, + {offset: 298, size: 815}, + {offset: 1113, size: 192}, + {offset: 1305, size: 259}, + {offset: 1564, size: 223}, + {offset: 1787, size: 1223}, + {offset: 3010, size: 486}, + {offset: 3496, size: 665}, + {offset: 4161, size: 3176}, + {offset: 7337, size: 225}, + {offset: 7562, size: 1223}, + {offset: 8785, size: 503} + ]; + + 
assert.equal(expected.length, block.txs.length); + assert.equal(block.getSize(), expected.reduce((a, b) => a + b.size, 81)); + + for (let i = 0; i < block.txs.length; i++) { + const {offset, size} = block.txs[i].getPosition(); + + assert.strictEqual(offset, expected[i].offset); + assert.strictEqual(size, expected[i].size); + } + }); + + it('should serialize with offset positions for txs (witness)', () => { + const [block] = block482683.getBlock(); + + const expected = [ + {offset: 81, size: 217}, + {offset: 298, size: 815}, + {offset: 1113, size: 192}, + {offset: 1305, size: 259}, + {offset: 1564, size: 223}, + {offset: 1787, size: 1223}, + {offset: 3010, size: 486}, + {offset: 3496, size: 665}, + {offset: 4161, size: 3176}, + {offset: 7337, size: 225}, + {offset: 7562, size: 1223}, + {offset: 8785, size: 503} + ]; + + assert.equal(expected.length, block.txs.length); + assert.equal(block.getSize(), expected.reduce((a, b) => a + b.size, 81)); + + // Reset the offset for all transactions, and clear + // any cached values for the block. + block.refresh(true); + for (let i = 0; i < block.txs.length; i++) + assert.equal(block.txs[i]._offset, -1); + + // Serialize the block, as done before saving to disk. 
+ const raw = block.toRaw(); + assert(raw); + + for (let i = 0; i < block.txs.length; i++) { + const {offset, size} = block.txs[i].getPosition(); + + assert.strictEqual(offset, expected[i].offset); + assert.strictEqual(size, expected[i].size); + } + }); + + it('should deserialize with offset positions for txs', () => { + const [block] = block300025.getBlock(); + + assert.equal(block.txs.length, 461); + + let expect = 83; + let total = 83; + + for (let i = 0; i < block.txs.length; i++) { + const {offset, size} = block.txs[i].getPosition(); + + assert.strictEqual(offset, expect); + expect += size; + total += size; + } + + assert.equal(total, 284231); + }); }); diff --git a/test/indexer-test.js b/test/indexer-test.js index a6b4ee935..b7a610472 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -11,6 +11,7 @@ const Miner = require('../lib/mining/miner'); const MemWallet = require('./util/memwallet'); const TXIndexer = require('../lib/indexer/txindexer'); const AddrIndexer = require('../lib/indexer/addrindexer'); +const BlockStore = require('../lib/blockstore/level'); const Network = require('../lib/protocol/network'); const network = Network.get('regtest'); @@ -18,10 +19,16 @@ const workers = new WorkerPool({ enabled: true }); +const blocks = new BlockStore({ + memory: true, + network +}); + const chain = new Chain({ memory: true, network, - workers + workers, + blocks }); const miner = new Miner({ @@ -52,6 +59,7 @@ describe('Indexer', function() { this.timeout(45000); it('should open indexer', async () => { + await blocks.open(); await chain.open(); await miner.open(); await txindexer.open(); From 8bff122253cdd83e45a0bd40a4b70fbdce2b8609 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 22 Mar 2019 16:01:51 -0700 Subject: [PATCH 06/40] indexer: use chain and blocks for indexer, remove chain client --- lib/bcoin-browser.js | 1 - lib/bcoin.js | 1 - lib/indexer/addrindexer.js | 2 +- lib/indexer/chainclient.js | 200 
------------------------------------- lib/indexer/index.js | 1 - lib/indexer/indexer.js | 196 ++++++++++++++---------------------- lib/indexer/nullclient.js | 142 -------------------------- lib/indexer/txindexer.js | 6 +- lib/node/fullnode.js | 10 +- test/indexer-test.js | 26 ++--- 10 files changed, 96 insertions(+), 489 deletions(-) delete mode 100644 lib/indexer/chainclient.js delete mode 100644 lib/indexer/nullclient.js diff --git a/lib/bcoin-browser.js b/lib/bcoin-browser.js index 711818adf..63ce0cd0c 100644 --- a/lib/bcoin-browser.js +++ b/lib/bcoin-browser.js @@ -58,7 +58,6 @@ bcoin.Mnemonic = require('./hd/mnemonic'); // Index bcoin.indexer = require('./indexer'); bcoin.Indexer = require('./indexer/indexer'); -bcoin.ChainClient = require('./indexer/chainclient'); bcoin.TXIndexer = require('./indexer/txindexer'); bcoin.AddrIndexer = require('./indexer/addrindexer'); diff --git a/lib/bcoin.js b/lib/bcoin.js index 8bff04423..4e26fc486 100644 --- a/lib/bcoin.js +++ b/lib/bcoin.js @@ -79,7 +79,6 @@ bcoin.define('Mnemonic', './hd/mnemonic'); // Index bcoin.define('indexer', './indexer'); bcoin.define('Indexer', './indexer/indexer'); -bcoin.define('ChainClient', './indexer/chainclient'); bcoin.define('TXIndexer', './indexer/txindexer'); bcoin.define('AddrIndexer', './indexer/addrindexer'); diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 6d274328c..765398f8b 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -159,7 +159,7 @@ class AddrIndexer extends Indexer { }); for (const [hash, index] of keys) { - const coin = await this.client.getCoin(hash, index); + const coin = await this.chain.getCoin(hash, index); assert(coin); coins.push(coin); } diff --git a/lib/indexer/chainclient.js b/lib/indexer/chainclient.js deleted file mode 100644 index cd86d25bf..000000000 --- a/lib/indexer/chainclient.js +++ /dev/null @@ -1,200 +0,0 @@ -/*! 
- * chainclient.js - chain client for bcoin - * Copyright (c) 2018, the bcoin developers (MIT License). - * https://github.com/bcoin-org/bcoin - */ - -'use strict'; - -const assert = require('assert'); -const AsyncEmitter = require('bevent'); -const Chain = require('../blockchain/chain'); - -/** - * Chain Client - * @extends AsyncEmitter - * @alias module:indexer.ChainClient - */ - -class ChainClient extends AsyncEmitter { - /** - * Create a chain client. - * @constructor - * @param {Chain} chain - */ - - constructor(chain) { - super(); - - assert(chain instanceof Chain); - - this.chain = chain; - this.network = chain.network; - this.opened = false; - - this.init(); - } - - /** - * Initialize the client. - */ - - init() { - this.chain.on('connect', async (entry, block, view) => { - if (!this.opened) - return; - - await this.emitAsync('block connect', entry, block, view); - }); - - this.chain.on('disconnect', async (entry, block, view) => { - if (!this.opened) - return; - - await this.emitAsync('block disconnect', entry, block, view); - }); - - this.chain.on('reset', async (tip) => { - if (!this.opened) - return; - - await this.emitAsync('chain reset', tip); - }); - } - - /** - * Open the client. - * @returns {Promise} - */ - - async open(options) { - assert(!this.opened, 'ChainClient is already open.'); - this.opened = true; - setImmediate(() => this.emit('connect')); - } - - /** - * Close the client. - * @returns {Promise} - */ - - async close() { - assert(this.opened, 'ChainClient is not open.'); - this.opened = false; - setImmediate(() => this.emit('disconnect')); - } - - /** - * Get chain tip. - * @returns {Promise} - */ - - async getTip() { - return this.chain.tip; - } - - /** - * Get chain entry. - * @param {Hash} hash - * @returns {Promise} - Returns {@link ChainEntry}. 
- */ - - async getEntry(hash) { - const entry = await this.chain.getEntry(hash); - - if (!entry) - return null; - - if (!await this.chain.isMainChain(entry)) - return null; - - return entry; - } - - /** - * Get a coin (unspents only). - * @param {Hash} hash - * @param {Number} index - * @returns {Promise} - Returns {@link Coin}. - */ - - async getCoin(hash, index) { - return this.chain.getCoin(hash, index); - } - - /** - * Get hash range. - * @param {Number} start - * @param {Number} end - * @returns {Promise} - */ - - async getHashes(start = -1, end = -1) { - return this.chain.getHashes(start, end); - } - - /** - * Get block - * @param {Hash} hash - * @returns {Promise} - Returns {@link Block} - */ - - async getBlock(hash) { - const block = await this.chain.getBlock(hash); - - if (!block) - return null; - - return block; - } - - /** - * Get a historical block coin viewpoint. - * @param {Block} hash - * @returns {Promise} - Returns {@link CoinView}. - */ - - async getBlockView(block) { - return this.chain.getBlockView(block); - } - - /** - * Get coin viewpoint. - * @param {TX} tx - * @returns {Promise} - Returns {@link CoinView}. - */ - - async getCoinView(tx) { - return this.chain.getCoinView(tx); - } - - /** - * Rescan for any missed blocks. - * @param {Number} start - Start block. 
- * @returns {Promise} - */ - - async rescan(start) { - for (let i = start; ; i++) { - const entry = await this.getEntry(i); - if (!entry) { - await this.emitAsync('chain tip'); - break; - }; - - const block = await this.getBlock(entry.hash); - assert(block); - - const view = await this.getBlockView(block); - assert(view); - - await this.emitAsync('block rescan', entry, block, view); - } - }; -} - -/* - * Expose - */ - -module.exports = ChainClient; diff --git a/lib/indexer/index.js b/lib/indexer/index.js index 96ad09a3f..129b3d93b 100644 --- a/lib/indexer/index.js +++ b/lib/indexer/index.js @@ -13,4 +13,3 @@ exports.Indexer = require('./indexer'); exports.TXIndexer = require('./txindexer'); exports.AddrIndexer = require('./addrindexer'); -exports.ChainClient = require('./chainclient'); diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 87356ad8b..dc7a5c35c 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -15,8 +15,6 @@ const Logger = require('blgr'); const Network = require('../protocol/network'); const layout = require('./layout'); const records = require('./records'); -const ChainClient = require('./chainclient'); -const NullClient = require('./nullclient'); const { ChainState, @@ -51,7 +49,9 @@ class Indexer extends EventEmitter { this.network = this.options.network; this.logger = this.options.logger.context(`${module}indexer`); - this.client = this.options.client || new NullClient(this); + this.blocks = this.options.blocks; + this.chain = this.options.chain; + this.db = null; this.rescanning = false; @@ -59,17 +59,6 @@ class Indexer extends EventEmitter { this.height = 0; this.lock = new Lock(); - - this.init(); - } - - /** - * Initialize indexdb. 
- * @private - */ - - init() { - this._bind(); } /** @@ -77,22 +66,11 @@ class Indexer extends EventEmitter { * @private */ - _bind() { - this.client.on('error', (err) => { - this.emit('error', err); - }); - - this.client.on('connect', async () => { - try { - await this.syncNode(); - } catch (e) { - this.emit('error', e); - } - }); - - this.client.on('block connect', async (entry, block, view) => { + bind() { + this.chain.on('connect', async (entry, block, view) => { if (this.rescanning) return; + try { await this.addBlock(entry, block, view); } catch (e) { @@ -100,37 +78,24 @@ class Indexer extends EventEmitter { } }); - this.client.on('block disconnect', async (entry, block, view) => { + this.chain.on('disconnect', async (entry, block, view) => { if (this.rescanning) return; - try { - await this.removeBlock(entry, block, view); - } catch (e) { - this.emit('error', e); - } - }); - this.client.on('block rescan', async (entry, block, view) => { try { - await this.rescanBlock(entry, block, view); + await this.removeBlock(entry, block, view); } catch (e) { this.emit('error', e); } }); - this.client.on('chain reset', async (tip) => { + this.chain.on('reset', async (tip) => { try { await this.resetChain(tip); } catch (e) { this.emit('error', e); } }); - - this.client.on('chain tip', async () => { - this.logger.debug('Indexer: finished rescan'); - const tip = await this.getTip(); - this.emit('chain tip', tip); - }); } /** @@ -160,7 +125,9 @@ class Indexer extends EventEmitter { await this.verifyNetwork(); - await this.connect(); + this.bind(); + + await this.sync(); } /** @@ -191,34 +158,15 @@ class Indexer extends EventEmitter { */ async close() { - await this.disconnect(); return this.db.close(); } - /** - * Connect to the chain server (client required). - * @returns {Promise} - */ - - async connect() { - return this.client.open(); - } - - /** - * Disconnect from chain server (client required). 
- * @returns {Promise} - */ - - async disconnect() { - return this.client.close(); - } - /** * Sync state with server on every connect. * @returns {Promise} */ - async syncNode() { + async sync() { const unlock = await this.lock.lock(); try { this.logger.info('Resyncing from server...'); @@ -251,7 +199,7 @@ class Indexer extends EventEmitter { this.logger.info('Initializing database state from server.'); const b = this.db.batch(); - const hashes = await this.client.getHashes(); + const hashes = await this.chain.getHashes(); let tip = null; @@ -278,6 +226,24 @@ class Indexer extends EventEmitter { return undefined; } + /** + * Get a chain entry for the main chain only. + * @private + * @returns {Promise} + */ + + async getEntry(hash) { + const entry = await this.chain.getEntry(hash); + + if (!entry) + return null; + + if (!await this.chain.isMainChain(entry)) + return null; + + return entry; + } + /** * Connect and sync with the chain server. * @private @@ -289,30 +255,29 @@ class Indexer extends EventEmitter { this.logger.info('Syncing state from height %d.', height); - // re-org when we're offline might - // leave chain in different state. - // scan chain backwards until we - // find a known 'good' height + // A re-org when we're offline might + // leave chain in a different state. + // Scan chain backwards until we + // find a known 'good' height. 
for (;;) { const tip = await this.getBlock(height); assert(tip); - if (await this.client.getEntry(tip.hash)) + if (await this.getEntry(tip.hash)) break; assert(height !== 0); height -= 1; } - // start scan from last indexed OR + // Start scan from last indexed OR // last known 'good' height whichever // is lower, because `scan` scans from // low to high blocks if (this.state.startHeight < height) height = this.state.startHeight; - this.logger.spam('Starting block rescan from: %d.', height); - return this.scan(height); + return this._rescan(height); } /** @@ -332,7 +297,7 @@ class Indexer extends EventEmitter { } if (entry.height % 1000 === 0) - this.logger.debug('rescanned block: %d.', entry.height); + this.logger.debug('Rescanned block: %d.', entry.height); if (entry.height > this.state.height + 1) { this.logger.warning('Rescan block too high: %d.', entry.height); @@ -347,33 +312,6 @@ class Indexer extends EventEmitter { } } - /** - * Rescan blockchain from a given height. - * @private - * @param {Number?} height - * @returns {Promise} - */ - - async scan(height) { - assert((height >>> 0) === height, 'Indexer: Must pass in a height.'); - - await this.rollback(height); - - const tip = this.state.height; - - this.logger.info( - 'Indexer is scanning %d blocks.', - tip - height + 1); - - try { - this.rescanning = true; - this.logger.debug('rescanning from %d to %d', height, tip); - await this.client.rescan(height); - } finally { - this.rescanning = false; - } - } - /** * Force a rescan. * @param {Number} height @@ -390,14 +328,38 @@ class Indexer extends EventEmitter { } /** - * Force a rescan (without a lock). + * Rescan blockchain from a given height. 
* @private * @param {Number} height * @returns {Promise} */ async _rescan(height) { - return this.scan(height); + assert((height >>> 0) === height, 'Must pass in a height.'); + + await this.rollback(height); + + const tip = this.state.height; + + this.logger.debug('Rescanning from %d to %d', height, tip); + + this.rescanning = true; + + for (let i = height; ; i++) { + const entry = await this.chain.getEntry(i); + if (!entry) + break; + + const block = await this.chain.getBlock(entry.hash); + assert(block); + + const view = await this.chain.getBlockView(block); + assert(view); + + await this.rescanBlock(entry, block, view); + } + + this.rescanning = false; } /** @@ -551,7 +513,7 @@ class Indexer extends EventEmitter { // processed (in the case of a crash). this.logger.warning('Already saw Indexer block (%d).', tip.height); } else if (tip.height !== this.state.startHeight + 1) { - await this.scan(this.state.height); + await this._rescan(this.state.height); return; } @@ -700,7 +662,7 @@ class IndexOptions { this.module = module; this.network = Network.primary; this.logger = Logger.global; - this.client = null; + this.blocks = null; this.chain = null; this.indexers = null; @@ -723,6 +685,14 @@ class IndexOptions { */ fromOptions(options) { + assert(options.blocks && typeof options.blocks === 'object', + 'Indexer requires a blockstore.'); + assert(options.chain && typeof options.chain === 'object', + 'Indexer requires chain.'); + + this.blocks = options.blocks; + this.chain = options.chain; + if (options.network != null) this.network = Network.get(options.network); @@ -731,20 +701,6 @@ class IndexOptions { this.logger = options.logger; } - if (options.client != null) { - assert(typeof options.client === 'object'); - this.client = options.client; - } - - if (options.chain != null) { - assert(typeof options.chain === 'object'); - this.client = new ChainClient(options.chain); - } - - if (!this.client) { - throw new Error('Client is required'); - } - if (options.prefix != 
null) { assert(typeof options.prefix === 'string'); this.prefix = options.prefix; diff --git a/lib/indexer/nullclient.js b/lib/indexer/nullclient.js deleted file mode 100644 index d71dba5ef..000000000 --- a/lib/indexer/nullclient.js +++ /dev/null @@ -1,142 +0,0 @@ -/*! - * nullclient.js - chain client for bcoin - * Copyright (c) 2018, the bcoin developers (MIT License). - * https://github.com/bcoin-org/bcoin - */ - -'use strict'; - -const assert = require('assert'); -const EventEmitter = require('events'); - -/** - * Null Client - * Sort of a fake local client for separation of concerns. - * @alias module:indexer.NullClient - */ - -class NullClient extends EventEmitter { - /** - * Create a client. - * @constructor - * @param {Chain} chain - */ - - constructor(chain) { - super(); - - this.chain = chain; - this.network = chain.network; - this.opened = false; - } - - /** - * Open the client. - * @returns {Promise} - */ - - async open(options) { - assert(!this.opened, 'NullClient is already open.'); - this.opened = true; - setImmediate(() => this.emit('connect')); - } - - /** - * Close the client. - * @returns {Promise} - */ - - async close() { - assert(this.opened, 'NullClient is not open.'); - this.opened = false; - setImmediate(() => this.emit('disconnect')); - } - - /** - * Get chain tip. - * @returns {Promise} - */ - - async getTip() { - const {hash, height, time} = this.network.genesis; - return { hash, height, time }; - } - - /** - * Get chain entry. - * @param {Hash} hash - * @returns {Promise} - Returns {@link ChainEntry}. - */ - - async getEntry(hash) { - return { hash, height: 0, time: 0 }; - } - - /** - * Get a coin (unspents only). - * @param {Hash} hash - * @param {Number} index - * @returns {Promise} - Returns {@link Coin}. - */ - - async getCoin(hash, index) { - return null; - } - - /** - * Get hash range. 
- * @param {Number} start - * @param {Number} end - * @returns {Promise} - */ - - async getHashes(start = -1, end = -1) { - return [this.network.genesis.hash]; - } - - /** - * Get block - * @param {Hash} hash - * @returns {Promise} - */ - - async getBlock(hash) { - return null; - } - - /** - * Get a historical block coin viewpoint. - * @param {Block} hash - * @returns {Promise} - Returns {@link CoinView}. - */ - - async getBlockView(block) { - return null; - } - - /** - * Get coin viewpoint. - * @param {TX} tx - * @returns {Promise} - Returns {@link CoinView}. - */ - - async getCoinView(tx) { - return null; - } - - /** - * Rescan for any missed blocks. - * @param {Number} start - Start block. - * @returns {Promise} - */ - - async rescan(start) { - ; - } -} - -/* - * Expose - */ - -module.exports = NullClient; diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index 8f68d6b48..8821fc307 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -184,8 +184,10 @@ class TXIndexer extends Indexer { return null; const record = TxRecord.fromRaw(raw); + const {block, offset, length} = record; + + const data = await this.blocks.read(block, offset, length); - const data = await this.read(record.block, record.offset, record.length); const tx = TX.fromRaw(data); const meta = TXMeta.fromTX(tx); @@ -228,7 +230,7 @@ class TXIndexer extends Indexer { */ async getSpentView(tx) { - const view = await this.client.getCoinView(tx); + const view = await this.chain.getCoinView(tx); for (const {prevout} of tx.inputs) { if (view.hasEntry(prevout)) diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 6b7c57532..b7969b672 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -157,25 +157,27 @@ class FullNode extends Node { }); // Indexers - this.txindex = null; - if (this.config.bool('index-tx')) + if (this.config.bool('index-tx')) { this.txindex = new TXIndexer({ network: this.network, logger: this.logger, + blocks: this.blocks, chain: 
this.chain, memory: this.config.bool('memory'), prefix: this.config.filter('index').str('prefix') || this.config.prefix }); + } - this.addrindex = null; - if (this.config.bool('index-address')) + if (this.config.bool('index-address')) { this.addrindex= new AddrIndexer({ network: this.network, logger: this.logger, + blocks: this.blocks, chain: this.chain, memory: this.config.bool('memory'), prefix: this.config.filter('index').str('prefix') || this.config.prefix }); + } this.init(); } diff --git a/test/indexer-test.js b/test/indexer-test.js index b7a610472..3f476da72 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -44,15 +44,17 @@ const wallet = new MemWallet({ }); const txindexer = new TXIndexer({ - 'memory': true, - 'network': network, - 'chain': chain + memory: true, + network, + chain, + blocks }); const addrindexer = new AddrIndexer({ - 'memory': true, - 'network': network, - 'chain': chain + memory: true, + network, + chain, + blocks }); describe('Indexer', function() { @@ -90,9 +92,6 @@ describe('Indexer', function() { }); it('should rescan and reindex 10 missed blocks', async () => { - await txindexer.disconnect(); - await addrindexer.disconnect(); - for (let i = 0; i < 10; i++) { const block = await cpu.mineBlock(); assert(block); @@ -100,17 +99,10 @@ describe('Indexer', function() { } assert.strictEqual(chain.height, 20); - - await txindexer.connect(); - await addrindexer.connect(); - - await new Promise(r => addrindexer.once('chain tip', r)); - assert.strictEqual(txindexer.state.startHeight, 20); assert.strictEqual(addrindexer.state.startHeight, 20); - const coins = - await addrindexer.getCoinsByAddress(miner.getAddress()); + const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); assert.strictEqual(coins.length, 20); for (const coin of coins) { From b9e8c7b8debce4945b9f86c1d309c58abad44f2f Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 22 Mar 2019 17:21:34 -0700 Subject: [PATCH 07/40] changelog: merge blockstore 
and indexer changes --- CHANGELOG.md | 112 ++++++++++++++++++++------------------------------- 1 file changed, 43 insertions(+), 69 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ba6922ae..763f6eb6e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,9 +5,19 @@ ### How to upgrade The way that block data is stored has changed for greater performance, -efficiency, reliability and portability. To upgrade to the new disk layout -it's necessary to move block data from LevelDB (e.g. `~/.bcoin/chain`) to -a new file based block storage (e.g. `~./.bcoin/blocks`). +efficiency, reliability and portability. + +- Block and undo block data has been moved from LevelDB into flat files. +- The transaction and address indexes have been moved into separate + LevelDB databases. +- The transaction has been de-duplicated, and will reduce disk usage by + half for those running with `txindex` enabled. +- The `txindex` and `addrindex` can now be enabled after the initial + block download. + +To upgrade to the new disk layout it's necessary to move block data +from LevelDB (e.g. `~/.bcoin/chain`) to a new file based block +storage (e.g. `~./.bcoin/blocks`). To do this you can run: ``` @@ -22,6 +32,17 @@ Alternatively, you can also sync the chain again, however the above migration will be faster as additional network bandwidth won't be used for downloading the blocks again. +For those with `txindex` and `addrindex` enabled there is an additional +step to cleanup and regenerate the indexes. + +``` bash +$ ./migrate/chaindb5to6.js /path/to/bcoin/chain +``` + +The indexes will be regenerated by rescanning the chain on next startup, +this process might take a while. Please take the potential downtime in +re-indexing into account before upgrading. + ### Wallet API changes #### HTTP @@ -76,6 +97,9 @@ for downloading the blocks again. 
- The option for `coin-cache` has been removed, this setting was causing issues during the sync with out-of-memory errors and was making performance worse instead of better. +- The database location for indexes can be configured via the + `--index-prefix` option. Default locations are `prefix` + `/index` + (e.g. `~/.bcoin/testnet/index/tx` and `~/.bcoin/testnet/index/addr`). ### Script changes @@ -97,6 +121,22 @@ for downloading the blocks again. - Config file `wallet.conf` won't be read during test runs that was causing issues with some testing environments. +### Chain changes + +- The method `getSpentView` accepts a `TXMeta` instead of `TX`. +- The transaction index methods are now implemented at `node.txindex`: + - `getMeta(hash)` + - `getTX(hash)` + - `hasTX(hash)` + - `getSpentView(tx)` +- The address index methods are now implemented at `node.addrindex`: + - `getCoinsByAddress(addrs)` + - `getHashesByAddress(addrs)` +- The following methods require `getHashesByAddress` in conjunction with + `node.txindex.getTX` and `node.txindex.getMeta` respectively. + - `getTXByAddress(addrs)` + - `getMetaByAddress(addrs)` + ### Other changes - A new module for storing block data in files. @@ -112,72 +152,6 @@ for downloading the blocks again. - Updates to dependencies including `bcrypto` to version > 3. - Various small fixes to run bcoin in a browser. -## v1.x.x - -### Migration - -The chain indexing subsystem has been refactored to be more modular and -flexible. - -A migration is required to cleanup the old indexes, if present. - -``` bash -$ ./migrate/chaindb4to5.js ~/.bcoin/chain -``` - -**Note**: if enabled, tx and addr indexes will be regenerated by rescanning the -chain on next startup, this process might take a while. Please take the -potential downtime in re-indexing into account before upgrading. - -Indexing has been made extensible so that new indexers such as a filter index -for BIP 157 can be implemented easily. 
- -Users can toggle any indexing on/off anytime before or after the initial sync. -The indexer will start resyncing the chain state and replaying blocks to -process them for indexing. Once caught up, it will just index new blocks. - -An index can be dropped by just deleting the corresponding database. - -### Notable Changes - -- `__lib/indexer__` `Indexer` implements the base methods which are common to - all indexers, including setting up the database, handling chain events such - as new block etc. - -- By default, bcoin ships `TXIndexer`, `AddrIndexer` implementations. These - indexers preserve all the existing indexing functionality and can be enabled - via the same flags i.e. `--index-tx` `--index-address`, for compatibility. - -- `Indexer` emits a `chain tip` with `[tip]`, where tip is an instance of - `BlockMeta`, when it is caught up with the chain. - -- Database location can be configured via `--index-prefix` config option. - Default locations are `prefix` + `/index` e.g.: `~/.bcoin/testnet/index/tx`, - `~/.bcoin/testnet/index/addr`. - -- `__/lib/blockchain/chain__` - `getSpentView` accepts a `TXMeta` insted of `TX` - -- `__/lib/blockchain/chain__` - the following methods have been moved out of - the chain to the indexers. Using the methods on the chain is deprecated: - - `node.txindex` implements: - - + `getMeta(hash)` - + `getTX(hash)` - + `hasTX(hash)` - + `getSpentView(tx)` - - `node.addrindex` implements: - - + `getCoinsByAddress(addrs)` - + `getHashesByAddress(addrs)` - - The following methods require `getHashesByAddress` in conjunction with - `node.txindex.getTX` and `node.txindex.getMeta` respectively. 
- - + `getTXByAddress(addrs)` - + `getMetaByAddress(addrs)` - ## v1.0.0 ### Migration From 05d55efb2283dbf65239635ac5d294ee77c8d841 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 22 Mar 2019 21:23:04 -0700 Subject: [PATCH 08/40] indexer: fix memory and cpu exhaustion for addrindex --- lib/indexer/addrindexer.js | 329 +++++++++++++++++++++++++++++++------ lib/node/fullnode.js | 25 ++- test/indexer-test.js | 245 +++++++++++++++++++++------ 3 files changed, 491 insertions(+), 108 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 765398f8b..d9375f0bb 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -8,6 +8,7 @@ const assert = require('assert'); const bdb = require('bdb'); +const bio = require('bufio'); const {BufferSet} = require('buffer-map'); const layout = require('./layout'); const Address = require('../primitives/address'); @@ -15,15 +16,93 @@ const Indexer = require('./indexer'); /* * AddrIndexer Database Layout: - * T[addr-hash][hash] -> dummy (tx by address) - * C[addr-hash][hash][index] -> dummy (coin by address) + * T[addr-hash][height][tx-index][hash] -> dummy (tx by address) + * C[addr-hash][height][tx-index][hash][coin-index] -> dummy (coin by address) + * x[addr-hash][hash] -> height and tx-index for tx + * y[addr-hash][hash][index] -> height, tx-index and coin-index for coin */ Object.assign(layout, { - T: bdb.key('T', ['hash', 'hash256']), - C: bdb.key('C', ['hash', 'hash256', 'uint32']) + T: bdb.key('T', ['hash', 'uint32', 'uint32', 'hash256']), + C: bdb.key('C', ['hash', 'uint32', 'uint32', 'hash256', 'uint32']), + x: bdb.key('x', ['hash', 'hash256']), + y: bdb.key('y', ['hash', 'hash256', 'uint32']) }); +/** + * Count + */ + +class Count { + /** + * Create count record. + * @constructor + * @param {Number} height + * @param {Number} index + */ + + constructor(height, index, coin) { + this.height = height >= 0 ? height : 0; + this.index = index >= 0 ? 
index : 0; + this.coin = coin >= 0 ? coin : -1; + + assert((this.height >>> 0) === this.height); + assert((this.index >>> 0) === this.index); + + if (coin) + assert((this.coin >>> 0) === this.coin); + } + + /** + * Serialize. + * @returns {Buffer} + */ + + toRaw() { + let len = 8; + if (this.coin >= 0) + len += 4; + + const bw = bio.write(len); + + bw.writeU32(this.height); + bw.writeU32(this.index); + + if (this.coin >= 0) + bw.writeU32(this.coin); + + return bw.render(); + } + + /** + * Deserialize. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + + this.height = br.readU32(); + this.index = br.readU32(); + + if (br.left() >= 4) + this.coin = br.readU32(); + + return this; + } + + /** + * Instantiate a count from a buffer. + * @param {Buffer} data + * @returns {Count} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } +} + /** * AddrIndexer * @alias module:indexer.AddrIndexer @@ -41,6 +120,8 @@ class AddrIndexer extends Indexer { super('addr', options); this.db = bdb.create(this.options); + this.maxTxs = options.maxTxs || 100; + this.maxCoins = options.maxCoins || 500; } /** @@ -53,12 +134,18 @@ class AddrIndexer extends Indexer { async indexBlock(entry, block, view) { const b = this.db.batch(); + const height = entry.height; for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; const hash = tx.hash(); - for (const addr of tx.getHashes(view)) - b.put(layout.T.encode(addr, hash), null); + + for (const addr of tx.getHashes(view)) { + const count = new Count(height, i); + + b.put(layout.T.encode(addr, height, i, hash), null); + b.put(layout.x.encode(addr, hash), count.toRaw()); + } if (!tx.isCoinbase()) { for (const {prevout} of tx.inputs) { @@ -71,18 +158,22 @@ class AddrIndexer extends Indexer { if (!addr) continue; - b.del(layout.C.encode(addr, hash, index)); + b.del(layout.C.encode(addr, height, i, hash, index)); + b.del(layout.y.encode(addr, hash, index)); } } - for (let i = 0; i < 
tx.outputs.length; i++) { - const output = tx.outputs[i]; + for (let j = 0; j < tx.outputs.length; j++) { + const output = tx.outputs[j]; const addr = output.getHash(); if (!addr) continue; - b.put(layout.C.encode(addr, hash, i), null); + const count = new Count(height, i, j); + + b.put(layout.C.encode(addr, height, i, hash, j), null); + b.put(layout.y.encode(addr, hash, j), count.toRaw()); } } @@ -99,11 +190,16 @@ class AddrIndexer extends Indexer { async unindexBlock(entry, block, view) { const b = this.db.batch(); + const height = entry.height; + for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; const hash = tx.hash(); - for (const addr of tx.getHashes(view)) - b.del(layout.T.encode(addr, hash)); + + for (const addr of tx.getHashes(view)) { + b.del(layout.T.encode(addr, height, i, hash)); + b.del(layout.x.encode(addr, hash)); + } if (!tx.isCoinbase()) { for (const {prevout} of tx.inputs) { @@ -116,18 +212,22 @@ class AddrIndexer extends Indexer { if (!addr) continue; - b.put(layout.C.encode(addr, hash, index), null); + const count = new Count(height, i); + + b.put(layout.C.encode(addr, height, i, hash, index), null); + b.put(layout.y.encode(addr, hash, index), count.toRaw()); } } - for (let i = 0; i < tx.outputs.length; i++) { - const output = tx.outputs[i]; + for (let j = 0; j < tx.outputs.length; j++) { + const output = tx.outputs[j]; const addr = output.getHash(); if (!addr) continue; - b.del(layout.C.encode(addr, hash, i)); + b.del(layout.C.encode(addr, height, i, hash, j)); + b.del(layout.y.encode(addr, hash, j)); } } @@ -136,33 +236,103 @@ class AddrIndexer extends Indexer { /** * Get all coins pertinent to an address. - * @param {Address[]} addrs + * @param {Address} addr + * @param {Object} options + * @param {Boolean} options.reverse + * @param {Boolean} options.limit * @returns {Promise} - Returns {@link Coin}[]. 
*/ - async getCoinsByAddress(addrs) { - if (!Array.isArray(addrs)) - addrs = [addrs]; + async getCoinsByAddress(addr, options = {}) { + const coins = []; + + const {reverse} = options; + let {limit} = options; + + if (!limit) + limit = this.maxCoins; + + if (limit > this.maxCoins) + throw new Error('Limit above max of ${this.maxCoins}.'); + const hash = Address.getHash(addr); + + const keys = await this.db.keys({ + gte: layout.C.min(hash), + lte: layout.C.max(hash), + limit, + reverse, + parse: (key) => { + const [,,, txid, index] = layout.C.decode(key); + return [txid, index]; + } + }); + + for (const [hash, index] of keys) { + const coin = await this.chain.getCoin(hash, index); + assert(coin); + coins.push(coin); + } + + return coins; + } + + /** + * Get all coins pertinent to an address after a + * specific txid and output/coin index. + * @param {Address} addr + * @param {Object} options + * @param {Buffer} options.txid + * @param {Number} options.index + * @param {Boolean} options.limit + * @param {Boolean} options.reverse + * @returns {Promise} - Returns {@link Coin}[]. 
+ */ + + async getCoinsByAddressAfter(addr, options = {}) { const coins = []; - for (const addr of addrs) { - const hash = Address.getHash(addr); + const {txid, index, reverse} = options; + let {limit} = options; - const keys = await this.db.keys({ - gte: layout.C.min(hash), - lte: layout.C.max(hash), - parse: (key) => { - const [, txid, index] = layout.C.decode(key); - return [txid, index]; - } - }); + if (!limit) + limit = this.maxCoins; + + if (limit > this.maxCoins) + throw new Error('Limit above max of ${this.maxCoins}.'); + + const hash = Address.getHash(addr); + + const raw = await this.db.get(layout.y.encode(hash, txid, index)); + + if (!raw) + return coins; - for (const [hash, index] of keys) { - const coin = await this.chain.getCoin(hash, index); - assert(coin); - coins.push(coin); + const count = Count.fromRaw(raw); + + const opts = { + limit, + reverse, + parse: (key) => { + const [,,, txid, index] = layout.C.decode(key); + return [txid, index]; } + }; + + if (!reverse) { + opts.gt = layout.C.min(hash, count.height, count.index, txid, count.coin); + opts.lte = layout.C.max(hash); + } else { + opts.gte = layout.C.min(hash); + opts.lt = layout.C.max(hash, count.height, count.index, txid, count.coin); + } + + const keys = await this.db.keys(opts); + + for (const [hash, index] of keys) { + const coin = await this.chain.getCoin(hash, index); + assert(coin); + coins.push(coin); } return coins; @@ -170,26 +340,93 @@ class AddrIndexer extends Indexer { /** * Get all transaction hashes to an address. - * @param {Address[]} addrs + * @param {Address} addr + * @param {Object} options + * @param {Boolean} options.limit + * @param {Boolean} options.reverse * @returns {Promise} - Returns {@link Hash}[]. 
*/ - async getHashesByAddress(addrs) { + async getHashesByAddress(addr, options = {}) { const set = new BufferSet(); - for (const addr of addrs) { - const hash = Address.getHash(addr); + const {reverse} = options; + let {limit} = options; - await this.db.keys({ - gte: layout.T.min(hash), - lte: layout.T.max(hash), - parse: (key) => { - const [, txid] = layout.T.decode(key); - set.add(txid); - } - }); + if (!limit) + limit = this.maxTxs; + + if (limit > this.maxTxs) + throw new Error('Limit above max of ${this.maxTxs}.'); + + const hash = Address.getHash(addr); + + await this.db.keys({ + gte: layout.T.min(hash), + lte: layout.T.max(hash), + limit, + reverse, + parse: (key) => { + const [,,, txid] = layout.T.decode(key); + set.add(txid); + } + }); + + return set.toArray(); + } + + /** + * Get all transaction hashes to an address after + * a specific txid. + * @param {Address} addr + * @param {Object} options + * @param {Buffer} options.txid + * @param {Boolean} options.limit + * @param {Boolean} options.reverse + * @returns {Promise} - Returns {@link Hash}[]. 
+ */ + + async getHashesByAddressAfter(addr, options = {}) { + const set = new BufferSet(); + + const hash = Address.getHash(addr); + + const {txid, reverse} = options; + let {limit} = options; + + if (!limit) + limit = this.maxTxs; + + if (limit > this.maxTxs) + throw new Error('Limit above max of ${this.maxTxs}.'); + + const raw = await this.db.get(layout.x.encode(hash, txid)); + + if (!raw) + return []; + + const count = Count.fromRaw(raw); + const {height, index} = count; + + const opts = { + limit, + reverse, + parse: (key) => { + const [,,, txid] = layout.T.decode(key); + set.add(txid); + } + }; + + if (!reverse) { + opts.gt = layout.T.min(hash, height, index, txid); + opts.lte = layout.T.max(hash); + } else { + opts.gte = layout.T.min(hash); + opts.lt = layout.T.max(hash, height, index, txid); } + await this.db.keys(opts); + return set.toArray(); } } diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index b7969b672..803e1fa37 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -454,17 +454,17 @@ class FullNode extends Node { /** * Get coins that pertain to an address from the mempool or chain database. * Takes into account spent coins in the mempool. - * @param {Address} addrs + * @param {Address} addr * @returns {Promise} - Returns {@link Coin}[]. */ - async getCoinsByAddress(addrs) { - const mempool = this.mempool.getCoinsByAddress(addrs); + async getCoinsByAddress(addr) { + const mempool = this.mempool.getCoinsByAddress(addr); if (!this.addrindex) return mempool; - const index = await this.addrindex.getCoinsByAddress(addrs); + const index = await this.addrindex.getCoinsByAddress(addr); const out = []; for (const coin of index) { @@ -485,18 +485,15 @@ class FullNode extends Node { /** * Retrieve transactions pertaining to an * address from the mempool or chain database. - * @param {Address} addrs + * @param {Address} addr * @returns {Promise} - Returns {@link TXMeta}[]. 
*/ - async getMetaByAddress(addrs) { - const mempool = this.mempool.getMetaByAddress(addrs); + async getMetaByAddress(addr) { + const mempool = this.mempool.getMetaByAddress(addr); if (this.txindex && this.addrindex) { - if (!Array.isArray(addrs)) - addrs = [addrs]; - - const hashes = await this.addrindex.getHashesByAddress(addrs); + const hashes = await this.addrindex.getHashesByAddress(addr); const mtxs = []; for (const hash of hashes) { @@ -547,12 +544,12 @@ class FullNode extends Node { /** * Retrieve transactions pertaining to an * address from the mempool or chain database. - * @param {Address} addrs + * @param {Address} addr * @returns {Promise} - Returns {@link TX}[]. */ - async getTXByAddress(addrs) { - const mtxs = await this.getMetaByAddress(addrs); + async getTXByAddress(addr) { + const mtxs = await this.getMetaByAddress(addr); const out = []; for (const mtx of mtxs) diff --git a/test/indexer-test.js b/test/indexer-test.js index 3f476da72..b3e495613 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -60,7 +60,7 @@ const addrindexer = new AddrIndexer({ describe('Indexer', function() { this.timeout(45000); - it('should open indexer', async () => { + before(async () => { await blocks.open(); await chain.open(); await miner.open(); @@ -68,62 +68,211 @@ describe('Indexer', function() { await addrindexer.open(); }); - it('should index 10 blocks', async () => { - miner.addresses.length = 0; - miner.addAddress(wallet.getReceive()); - for (let i = 0; i < 10; i++) { - const block = await cpu.mineBlock(); - assert(block); - assert(await chain.add(block)); - } - - assert.strictEqual(chain.height, 10); - assert.strictEqual(txindexer.state.startHeight, 10); - assert.strictEqual(addrindexer.state.startHeight, 10); - - const coins = - await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 10); - - for (const coin of coins) { - const meta = await txindexer.getMeta(coin.hash); - assert.bufferEqual(meta.tx.hash(), 
coin.hash); - } + after(async () => { + await blocks.close(); + await chain.close(); + await miner.close(); + await txindexer.close(); + await addrindexer.close(); }); - it('should rescan and reindex 10 missed blocks', async () => { - for (let i = 0; i < 10; i++) { - const block = await cpu.mineBlock(); - assert(block); - assert(await chain.add(block)); - } + describe('index 10 blocks', function() { + before(async () => { + miner.addresses.length = 0; + miner.addAddress(wallet.getReceive()); - assert.strictEqual(chain.height, 20); - assert.strictEqual(txindexer.state.startHeight, 20); - assert.strictEqual(addrindexer.state.startHeight, 20); + for (let i = 0; i < 10; i++) { + const block = await cpu.mineBlock(); + assert(block); + assert(await chain.add(block)); + } - const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 20); + assert.strictEqual(chain.height, 10); + assert.strictEqual(txindexer.state.startHeight, 10); + assert.strictEqual(addrindexer.state.startHeight, 10); + }); - for (const coin of coins) { - const meta = await txindexer.getMeta(coin.hash); - assert.bufferEqual(meta.tx.hash(), coin.hash); - } + it('should get coins by address', async () => { + const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 10); + }); + + it('should get txs by address', async () => { + const hashes = await addrindexer.getHashesByAddress(miner.getAddress()); + assert.strictEqual(hashes.length, 10); + }); + + it('should get txs for coins by address', async () => { + const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 10); + + for (const coin of coins) { + const meta = await txindexer.getMeta(coin.hash); + assert.bufferEqual(meta.tx.hash(), coin.hash); + } + }); + + it('should coins by address (limit)', async () => { + const addr = miner.getAddress(); + const coins = await addrindexer.getCoinsByAddress(addr, {limit: 1}); 
+ assert.strictEqual(coins.length, 1); + }); + + it('should coins by address (reverse)', async () => { + const addr = miner.getAddress(); + const coins = await addrindexer.getCoinsByAddress( + addr, {reverse: false}); + + assert.strictEqual(coins.length, 10); + + const reversed = await addrindexer.getCoinsByAddress( + addr, {reverse: true}); + + assert.strictEqual(reversed.length, 10); + + for (let i = 0; i < 10; i++) + assert.deepEqual(coins[i], reversed[9 - i]); + }); + + it('should get txs by address (limit)', async () => { + const addr = miner.getAddress(); + const hashes = await addrindexer.getHashesByAddress(addr, {limit: 1}); + assert.strictEqual(hashes.length, 1); + }); + + it('should get txs by address (reverse)', async () => { + const addr = miner.getAddress(); + const hashes = await addrindexer.getHashesByAddress( + addr, {reverse: false}); + + assert.strictEqual(hashes.length, 10); + + const reversed = await addrindexer.getHashesByAddress( + addr, {reverse: true}); + + assert.strictEqual(reversed.length, 10); + + for (let i = 0; i < 10; i++) + assert.deepEqual(hashes[i], reversed[9 - i]); + }); + + it('should coins by address after txid and index', async () => { + const addr = miner.getAddress(); + const coins = await addrindexer.getCoinsByAddress(addr, {limit: 5}); + + assert.strictEqual(coins.length, 5); + + const txid = coins[4].hash; + const index = coins[4].index; + + const next = await addrindexer.getCoinsByAddressAfter( + addr, {txid: txid, index: index, limit: 5}); + + assert.strictEqual(next.length, 5); + + const all = await addrindexer.getCoinsByAddress(addr); + assert.strictEqual(all.length, 10); + + assert.deepEqual(coins.concat(next), all); + }); + + it('should coins by address after txid and index (reverse)', async () => { + const addr = miner.getAddress(); + const coins = await addrindexer.getCoinsByAddress( + addr, {limit: 5, reverse: true}); + + assert.strictEqual(coins.length, 5); + + const txid = coins[4].hash; + const index = 
coins[4].index; + + const next = await addrindexer.getCoinsByAddressAfter( + addr, {txid: txid, index: index, limit: 5, reverse: true}); + + assert.strictEqual(next.length, 5); + + const all = await addrindexer.getCoinsByAddress(addr, {reverse: true}); + assert.strictEqual(all.length, 10); + + assert.deepEqual(coins.concat(next), all); + }); + + it('should txs by address after txid', async () => { + const addr = miner.getAddress(); + const hashes = await addrindexer.getHashesByAddress(addr, {limit: 5}); + + assert.strictEqual(hashes.length, 5); + + const txid = hashes[4]; + + const next = await addrindexer.getHashesByAddressAfter( + addr, {txid: txid, limit: 5}); + + assert.strictEqual(next.length, 5); + + const all = await addrindexer.getHashesByAddress(addr); + assert.strictEqual(all.length, 10); + + assert.deepEqual(hashes.concat(next), all); + }); + + it('should txs by address after txid (reverse)', async () => { + const addr = miner.getAddress(); + const hashes = await addrindexer.getHashesByAddress( + addr, {limit: 5, reverse: true}); + + assert.strictEqual(hashes.length, 5); + + const txid = hashes[4]; + + const next = await addrindexer.getHashesByAddressAfter( + addr, {txid: txid, limit: 5, reverse: true}); + + assert.strictEqual(next.length, 5); + + const all = await addrindexer.getHashesByAddress( + addr, {reverse: true}); + + assert.strictEqual(all.length, 10); + + assert.deepEqual(hashes.concat(next), all); + }); }); - it('should handle indexing a reorg', async () => { - await reorg(chain, cpu, 10); + describe('rescan and reorg', function() { + it('should rescan and reindex 10 missed blocks', async () => { + for (let i = 0; i < 10; i++) { + const block = await cpu.mineBlock(); + assert(block); + assert(await chain.add(block)); + } + + assert.strictEqual(chain.height, 20); + assert.strictEqual(txindexer.state.startHeight, 20); + assert.strictEqual(addrindexer.state.startHeight, 20); + + const coins = await 
addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 20); + + for (const coin of coins) { + const meta = await txindexer.getMeta(coin.hash); + assert.bufferEqual(meta.tx.hash(), coin.hash); + } + }); + + it('should handle indexing a reorg', async () => { + await reorg(chain, cpu, 10); - assert.strictEqual(txindexer.state.startHeight, 31); - assert.strictEqual(addrindexer.state.startHeight, 31); + assert.strictEqual(txindexer.state.startHeight, 31); + assert.strictEqual(addrindexer.state.startHeight, 31); - const coins = - await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 31); + const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); + assert.strictEqual(coins.length, 31); - for (const coin of coins) { - const meta = await txindexer.getMeta(coin.hash); - assert.bufferEqual(meta.tx.hash(), coin.hash); - } + for (const coin of coins) { + const meta = await txindexer.getMeta(coin.hash); + assert.bufferEqual(meta.tx.hash(), coin.hash); + } + }); }); }); From 7dc55c9c3cc4690e4c050f10d6802d3bda0fdea4 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 25 Mar 2019 15:30:12 -0700 Subject: [PATCH 09/40] indexer: remove address coin index --- lib/indexer/addrindexer.js | 234 +++++-------------------------------- lib/node/fullnode.js | 31 ----- lib/node/http.js | 54 --------- test/indexer-test.js | 98 ++-------------- test/node-test.js | 32 ----- 5 files changed, 42 insertions(+), 407 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index d9375f0bb..a9c5d5939 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -16,17 +16,24 @@ const Indexer = require('./indexer'); /* * AddrIndexer Database Layout: - * T[addr-hash][height][tx-index][hash] -> dummy (tx by address) - * C[addr-hash][height][tx-index][hash][coin-index] -> dummy (coin by address) - * x[addr-hash][hash] -> height and tx-index for tx - * y[addr-hash][hash][index] -> 
height, tx-index and coin-index for coin -*/ + * A[addr-hash][height][index][hash] -> dummy (tx by address, height and index) + * a[addr-hash][hash] -> (tx height and index by address and tx hash) + * + * The database layout is organized so that transactions are sorted in + * the same order as the blocks (e.g. chronological order) using the block + * height and transaction index. This provides the ability to query for + * sets of transactions within that order. For a wallet that would like to + * synchronize or rescan, this could be a query for all of the latest + * transactions, but not for earlier transactions that are already known. + * Furthermore, to be able to query for all transactions in multiple sets + * without reference to height and index, there is a mapping from address + * and tx hash to the height and index as an entry point to the + * ordered transactions. + */ Object.assign(layout, { - T: bdb.key('T', ['hash', 'uint32', 'uint32', 'hash256']), - C: bdb.key('C', ['hash', 'uint32', 'uint32', 'hash256', 'uint32']), - x: bdb.key('x', ['hash', 'hash256']), - y: bdb.key('y', ['hash', 'hash256', 'uint32']) + A: bdb.key('A', ['hash', 'uint32', 'uint32', 'hash256']), + a: bdb.key('a', ['hash', 'hash256']) }); /** @@ -42,15 +49,11 @@ class Count { */ constructor(height, index, coin) { - this.height = height >= 0 ? height : 0; - this.index = index >= 0 ? index : 0; - this.coin = coin >= 0 ? 
coin : -1; + this.height = height || 0; + this.index = index || 0; assert((this.height >>> 0) === this.height); assert((this.index >>> 0) === this.index); - - if (coin) - assert((this.coin >>> 0) === this.coin); } /** @@ -59,18 +62,11 @@ class Count { */ toRaw() { - let len = 8; - if (this.coin >= 0) - len += 4; - - const bw = bio.write(len); + const bw = bio.write(8); bw.writeU32(this.height); bw.writeU32(this.index); - if (this.coin >= 0) - bw.writeU32(this.coin); - return bw.render(); } @@ -86,9 +82,6 @@ class Count { this.height = br.readU32(); this.index = br.readU32(); - if (br.left() >= 4) - this.coin = br.readU32(); - return this; } @@ -121,7 +114,6 @@ class AddrIndexer extends Indexer { this.db = bdb.create(this.options); this.maxTxs = options.maxTxs || 100; - this.maxCoins = options.maxCoins || 500; } /** @@ -143,37 +135,8 @@ class AddrIndexer extends Indexer { for (const addr of tx.getHashes(view)) { const count = new Count(height, i); - b.put(layout.T.encode(addr, height, i, hash), null); - b.put(layout.x.encode(addr, hash), count.toRaw()); - } - - if (!tx.isCoinbase()) { - for (const {prevout} of tx.inputs) { - const {hash, index} = prevout; - const coin = view.getOutput(prevout); - assert(coin); - - const addr = coin.getHash(); - - if (!addr) - continue; - - b.del(layout.C.encode(addr, height, i, hash, index)); - b.del(layout.y.encode(addr, hash, index)); - } - } - - for (let j = 0; j < tx.outputs.length; j++) { - const output = tx.outputs[j]; - const addr = output.getHash(); - - if (!addr) - continue; - - const count = new Count(height, i, j); - - b.put(layout.C.encode(addr, height, i, hash, j), null); - b.put(layout.y.encode(addr, hash, j), count.toRaw()); + b.put(layout.A.encode(addr, height, i, hash), null); + b.put(layout.a.encode(addr, hash), count.toRaw()); } } @@ -197,147 +160,14 @@ class AddrIndexer extends Indexer { const hash = tx.hash(); for (const addr of tx.getHashes(view)) { - b.del(layout.T.encode(addr, height, i, hash)); - 
b.del(layout.x.encode(addr, hash)); - } - - if (!tx.isCoinbase()) { - for (const {prevout} of tx.inputs) { - const {hash, index} = prevout; - const coin = view.getOutput(prevout); - assert(coin); - - const addr = coin.getHash(); - - if (!addr) - continue; - - const count = new Count(height, i); - - b.put(layout.C.encode(addr, height, i, hash, index), null); - b.put(layout.y.encode(addr, hash, index), count.toRaw()); - } - } - - for (let j = 0; j < tx.outputs.length; j++) { - const output = tx.outputs[j]; - const addr = output.getHash(); - - if (!addr) - continue; - - b.del(layout.C.encode(addr, height, i, hash, j)); - b.del(layout.y.encode(addr, hash, j)); + b.del(layout.A.encode(addr, height, i, hash)); + b.del(layout.a.encode(addr, hash)); } } return b.write(); } - /** - * Get all coins pertinent to an address. - * @param {Address} addr - * @param {Object} options - * @param {Boolean} options.reverse - * @param {Boolean} options.limit - * @returns {Promise} - Returns {@link Coin}[]. - */ - - async getCoinsByAddress(addr, options = {}) { - const coins = []; - - const {reverse} = options; - let {limit} = options; - - if (!limit) - limit = this.maxCoins; - - if (limit > this.maxCoins) - throw new Error('Limit above max of ${this.maxCoins}.'); - - const hash = Address.getHash(addr); - - const keys = await this.db.keys({ - gte: layout.C.min(hash), - lte: layout.C.max(hash), - limit, - reverse, - parse: (key) => { - const [,,, txid, index] = layout.C.decode(key); - return [txid, index]; - } - }); - - for (const [hash, index] of keys) { - const coin = await this.chain.getCoin(hash, index); - assert(coin); - coins.push(coin); - } - - return coins; - } - - /** - * Get all coins pertinent to an address after a - * specific txid and output/coin index. 
- * @param {Address} addr - * @param {Object} options - * @param {Buffer} options.txid - * @param {Number} options.index - * @param {Boolean} options.limit - * @param {Boolean} options.reverse - * @returns {Promise} - Returns {@link Coin}[]. - */ - - async getCoinsByAddressAfter(addr, options = {}) { - const coins = []; - - const {txid, index, reverse} = options; - let {limit} = options; - - if (!limit) - limit = this.maxCoins; - - if (limit > this.maxCoins) - throw new Error('Limit above max of ${this.maxCoins}.'); - - const hash = Address.getHash(addr); - - const raw = await this.db.get(layout.y.encode(hash, txid, index)); - - if (!raw) - return coins; - - const count = Count.fromRaw(raw); - - const opts = { - limit, - reverse, - parse: (key) => { - const [,,, txid, index] = layout.C.decode(key); - return [txid, index]; - } - }; - - if (!reverse) { - opts.gt = layout.C.min(hash, count.height, count.index, txid, count.coin); - opts.lte = layout.C.max(hash); - } else { - opts.gte = layout.C.min(hash); - opts.lt = layout.C.max(hash, count.height, count.index, txid, count.coin); - } - - const keys = await this.db.keys(opts); - - for (const [hash, index] of keys) { - const coin = await this.chain.getCoin(hash, index); - assert(coin); - coins.push(coin); - } - - return coins; - } - /** * Get all transaction hashes to an address. 
* @param {Address} addr @@ -362,12 +192,12 @@ class AddrIndexer extends Indexer { const hash = Address.getHash(addr); await this.db.keys({ - gte: layout.T.min(hash), - lte: layout.T.max(hash), + gte: layout.A.min(hash), + lte: layout.A.max(hash), limit, reverse, parse: (key) => { - const [,,, txid] = layout.T.decode(key); + const [,,, txid] = layout.A.decode(key); set.add(txid); } }); @@ -400,7 +230,7 @@ class AddrIndexer extends Indexer { if (limit > this.maxTxs) throw new Error('Limit above max of ${this.maxTxs}.'); - const raw = await this.db.get(layout.x.encode(hash, txid)); + const raw = await this.db.get(layout.a.encode(hash, txid)); if (!raw) return []; @@ -412,17 +242,17 @@ class AddrIndexer extends Indexer { limit, reverse, parse: (key) => { - const [,,, txid] = layout.T.decode(key); + const [,,, txid] = layout.A.decode(key); set.add(txid); } }; if (!reverse) { - opts.gt = layout.T.min(hash, height, index, txid); - opts.lte = layout.T.max(hash); + opts.gt = layout.A.min(hash, height, index, txid); + opts.lte = layout.A.max(hash); } else { - opts.gte = layout.T.min(hash); - opts.lt = layout.T.max(hash, height, index, txid); + opts.gte = layout.A.min(hash); + opts.lt = layout.A.max(hash, height, index, txid); } await this.db.keys(opts); diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 803e1fa37..4e6747d40 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -451,37 +451,6 @@ class FullNode extends Node { return this.chain.getCoin(hash, index); } - /** - * Get coins that pertain to an address from the mempool or chain database. - * Takes into account spent coins in the mempool. - * @param {Address} addr - * @returns {Promise} - Returns {@link Coin}[]. 
- */ - - async getCoinsByAddress(addr) { - const mempool = this.mempool.getCoinsByAddress(addr); - - if (!this.addrindex) - return mempool; - - const index = await this.addrindex.getCoinsByAddress(addr); - const out = []; - - for (const coin of index) { - const spent = this.mempool.isSpent(coin.hash, coin.index); - - if (spent) - continue; - - out.push(coin); - } - - for (const coin of mempool) - out.push(coin); - - return out; - } - /** * Retrieve transactions pertaining to an * address from the mempool or chain database. diff --git a/lib/node/http.js b/lib/node/http.js index 2c9b4dfa4..40e80f323 100644 --- a/lib/node/http.js +++ b/lib/node/http.js @@ -149,24 +149,6 @@ class HTTP extends Server { }); }); - // UTXO by address - this.get('/coin/address/:address', async (req, res) => { - const valid = Validator.fromRequest(req); - const address = valid.str('address'); - - enforce(address, 'Address is required.'); - enforce(!this.chain.options.spv, 'Cannot get coins in SPV mode.'); - - const addr = Address.fromString(address, this.network); - const coins = await this.node.getCoinsByAddress(addr); - const result = []; - - for (const coin of coins) - result.push(coin.getJSON(this.network)); - - res.json(200, result); - }); - // UTXO by id this.get('/coin/:hash/:index', async (req, res) => { const valid = Validator.fromRequest(req); @@ -187,23 +169,6 @@ class HTTP extends Server { res.json(200, coin.getJSON(this.network)); }); - // Bulk read UTXOs - this.post('/coin/address', async (req, res) => { - const valid = Validator.fromRequest(req); - const address = valid.array('addresses'); - - enforce(address, 'Address is required.'); - enforce(!this.chain.options.spv, 'Cannot get coins in SPV mode.'); - - const coins = await this.node.getCoinsByAddress(address); - const result = []; - - for (const coin of coins) - result.push(coin.getJSON(this.network)); - - res.json(200, result); - }); - // TX by hash this.get('/tx/:hash', async (req, res) => { const valid = 
Validator.fromRequest(req); @@ -244,25 +209,6 @@ class HTTP extends Server { res.json(200, result); }); - // Bulk read TXs - this.post('/tx/address', async (req, res) => { - const valid = Validator.fromRequest(req); - const address = valid.array('addresses'); - - enforce(address, 'Address is required.'); - enforce(!this.chain.options.spv, 'Cannot get TX in SPV mode.'); - - const metas = await this.node.getMetaByAddress(address); - const result = []; - - for (const meta of metas) { - const view = await this.node.getMetaView(meta); - result.push(meta.getJSON(this.network, view, this.chain.height)); - } - - res.json(200, result); - }); - // Block by hash/height this.get('/block/:block', async (req, res) => { const valid = Validator.fromRequest(req); diff --git a/test/indexer-test.js b/test/indexer-test.js index b3e495613..085a7c7d3 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -92,48 +92,11 @@ describe('Indexer', function() { assert.strictEqual(addrindexer.state.startHeight, 10); }); - it('should get coins by address', async () => { - const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 10); - }); - it('should get txs by address', async () => { const hashes = await addrindexer.getHashesByAddress(miner.getAddress()); assert.strictEqual(hashes.length, 10); }); - it('should get txs for coins by address', async () => { - const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 10); - - for (const coin of coins) { - const meta = await txindexer.getMeta(coin.hash); - assert.bufferEqual(meta.tx.hash(), coin.hash); - } - }); - - it('should coins by address (limit)', async () => { - const addr = miner.getAddress(); - const coins = await addrindexer.getCoinsByAddress(addr, {limit: 1}); - assert.strictEqual(coins.length, 1); - }); - - it('should coins by address (reverse)', async () => { - const addr = miner.getAddress(); - const coins = await 
addrindexer.getCoinsByAddress( - addr, {reverse: false}); - - assert.strictEqual(coins.length, 10); - - const reversed = await addrindexer.getCoinsByAddress( - addr, {reverse: true}); - - assert.strictEqual(reversed.length, 10); - - for (let i = 0; i < 10; i++) - assert.deepEqual(coins[i], reversed[9 - i]); - }); - it('should get txs by address (limit)', async () => { const addr = miner.getAddress(); const hashes = await addrindexer.getHashesByAddress(addr, {limit: 1}); @@ -156,47 +119,6 @@ describe('Indexer', function() { assert.deepEqual(hashes[i], reversed[9 - i]); }); - it('should coins by address after txid and index', async () => { - const addr = miner.getAddress(); - const coins = await addrindexer.getCoinsByAddress(addr, {limit: 5}); - - assert.strictEqual(coins.length, 5); - - const txid = coins[4].hash; - const index = coins[4].index; - - const next = await addrindexer.getCoinsByAddressAfter( - addr, {txid: txid, index: index, limit: 5}); - - assert.strictEqual(next.length, 5); - - const all = await addrindexer.getCoinsByAddress(addr); - assert.strictEqual(all.length, 10); - - assert.deepEqual(coins.concat(next), all); - }); - - it('should coins by address after txid and index (reverse)', async () => { - const addr = miner.getAddress(); - const coins = await addrindexer.getCoinsByAddress( - addr, {limit: 5, reverse: true}); - - assert.strictEqual(coins.length, 5); - - const txid = coins[4].hash; - const index = coins[4].index; - - const next = await addrindexer.getCoinsByAddressAfter( - addr, {txid: txid, index: index, limit: 5, reverse: true}); - - assert.strictEqual(next.length, 5); - - const all = await addrindexer.getCoinsByAddress(addr, {reverse: true}); - assert.strictEqual(all.length, 10); - - assert.deepEqual(coins.concat(next), all); - }); - it('should txs by address after txid', async () => { const addr = miner.getAddress(); const hashes = await addrindexer.getHashesByAddress(addr, {limit: 5}); @@ -251,12 +173,12 @@ describe('Indexer', 
function() { assert.strictEqual(txindexer.state.startHeight, 20); assert.strictEqual(addrindexer.state.startHeight, 20); - const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 20); + const hashes = await addrindexer.getHashesByAddress(miner.getAddress()); + assert.strictEqual(hashes.length, 20); - for (const coin of coins) { - const meta = await txindexer.getMeta(coin.hash); - assert.bufferEqual(meta.tx.hash(), coin.hash); + for (const hash of hashes) { + const meta = await txindexer.getMeta(hash); + assert.bufferEqual(meta.tx.hash(), hash); } }); @@ -266,12 +188,12 @@ describe('Indexer', function() { assert.strictEqual(txindexer.state.startHeight, 31); assert.strictEqual(addrindexer.state.startHeight, 31); - const coins = await addrindexer.getCoinsByAddress(miner.getAddress()); - assert.strictEqual(coins.length, 31); + const hashes = await addrindexer.getHashesByAddress(miner.getAddress()); + assert.strictEqual(hashes.length, 31); - for (const coin of coins) { - const meta = await txindexer.getMeta(coin.hash); - assert.bufferEqual(meta.tx.hash(), coin.hash); + for (const hash of hashes) { + const meta = await txindexer.getMeta(hash); + assert.bufferEqual(meta.tx.hash(), hash); } }); }); diff --git a/test/node-test.js b/test/node-test.js index fa017fa13..9d2c37efe 100644 --- a/test/node-test.js +++ b/test/node-test.js @@ -775,38 +775,6 @@ describe('Node', function() { assert.strictEqual(meta.tx.txid(), tx2.txid()); }); - it('should get coin/tx by addr', async () => { - const addr = await wallet.receiveAddress(); - const mtx = await wallet.createTX({ - rate: 100000, - outputs: [{ - value: 100000, - address: addr - }] - }); - - await wallet.sign(mtx); - - const tx = mtx.toTX(); - const job = await miner.createJob(); - - job.addTX(tx, mtx.view); - job.refresh(); - - const block = await job.mineAsync(); - await chain.add(block); - - await new Promise(r => setTimeout(r, 300)); - - const txs = await 
node.getTXByAddress(addr.hash); - const tx2 = txs[0]; - assert.strictEqual(tx.txid(), tx2.txid()); - - const coins = await node.getCoinsByAddress(addr.hash); - const coin = coins[0]; - assert.strictEqual(tx.txid(), coin.txid()); - }); - it('should cleanup', async () => { consensus.COINBASE_MATURITY = 100; await node.close(); From 32904c7940315d612aad9f02b43bea6ef16df552 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 25 Mar 2019 15:55:18 -0700 Subject: [PATCH 10/40] changelog: fix memory and cpu exhaustion for addrindex --- CHANGELOG.md | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 763f6eb6e..e22ca61d4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,8 @@ efficiency, reliability and portability. half for those running with `txindex` enabled. - The `txindex` and `addrindex` can now be enabled after the initial block download. +- The `addrindex` has been sorted to support querying for large sets + of results, and will no longer cause CPU and memory exhaustion issues. To upgrade to the new disk layout it's necessary to move block data from LevelDB (e.g. `~/.bcoin/chain`) to a new file based block @@ -71,6 +73,30 @@ re-indexing into account before upgrading. `iswitness`, `witness_version` and `witness_program`. (a28ffa272a3c4d90d0273d9aa223a23becc08e0e) +### Node API changes + +#### HTTP + +Several CPU and memory exhaustion issues have been resolved with some +additional arguments for querying multiple sets of results for addresses +that have many transactions. + +- `GET /tx/address/:address` has several new arguments: `after`, `reverse` + and `limit`. The `after` argument is a txid, for querying additional results + after a previous result. The `reverse` argument will change the order that + results are returned, the default order is oldest to latest. The `limit` + argument can be used to give results back in smaller sets if necessary. 
+- `POST /tx/address` This has been deprecated, instead query for each address + individually with `GET /tx/address/:address` with the expectation that + there could be _many_ results that would additionally need to be queried + in a subsequent query using the `after` argument to request the next set. +- `POST /coin/address` and `GET /coin/address/:address` are deprecated as + coins can be generated using results from `/tx/address/:address` and + querying by only a range of the latest transactions to stay synchronized. + Coins could otherwise be removed from results at any point, and thus the + entire set of results would need to be queried every time to discover + which coins have been spent and are currently available. + ### Network changes - Regtest params have been updated to correspond with other bitcoin @@ -129,13 +155,14 @@ re-indexing into account before upgrading. - `getTX(hash)` - `hasTX(hash)` - `getSpentView(tx)` -- The address index methods are now implemented at `node.addrindex`: - - `getCoinsByAddress(addrs)` +- The address index method is now implemented at `node.addrindex`: - `getHashesByAddress(addrs)` - The following methods require `getHashesByAddress` in conjunction with `node.txindex.getTX` and `node.txindex.getMeta` respectively. 
- `getTXByAddress(addrs)` - `getMetaByAddress(addrs)` +- The following method has been deprecated: + - `getCoinsByAddress(addrs)` ### Other changes From 7342952ec48748d2e5496e233842bc4a7d91edfc Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Wed, 27 Mar 2019 18:58:27 +0530 Subject: [PATCH 11/40] migrate: update chaindb5to6 migration --- migrate/chaindb5to6.js | 56 +++++++++++++++++++++++++++++++----------- 1 file changed, 41 insertions(+), 15 deletions(-) diff --git a/migrate/chaindb5to6.js b/migrate/chaindb5to6.js index 29d2642ac..8a85b607f 100644 --- a/migrate/chaindb5to6.js +++ b/migrate/chaindb5to6.js @@ -17,6 +17,34 @@ const db = bdb.create({ createIfMissing: false }); +async function updateVersion() { + const ver = await checkVersion(); + + console.log('Updating version to %d.', ver + 1); + + const buf = Buffer.allocUnsafe(5 + 4); + buf.write('chain', 0, 'ascii'); + buf.writeUInt32LE(6, 5, true); + + const parent = db.batch(); + parent.put(layout.V.encode(), buf); + await parent.write(); +} + +async function checkVersion() { + console.log('Checking version.'); + + const data = await db.get(layout.V.encode()); + assert(data, 'No version.'); + + const ver = data.readUInt32LE(5, true); + + if (ver !== 5) + throw Error(`DB is version ${ver}.`); + + return ver; +} + async function removeKey(name, key) { const iter = db.iterator({ gte: key.min(), @@ -43,6 +71,16 @@ async function removeKey(name, key) { console.log('Cleaned up %d %s index records.', total, name); } +async function migrateIndexes() { + const t = bdb.key('t', ['hash256']); + const T = bdb.key('T', ['hash', 'hash256']); + const C = bdb.key('C', ['hash', 'hash256', 'uint32']); + + await removeKey('hash -> tx', t); + await removeKey('addr -> tx', T); + await removeKey('addr -> coin', C); +} + /* * Execute */ @@ -51,24 +89,12 @@ async function removeKey(name, key) { await db.open(); console.log('Opened %s.', process.argv[2]); - console.log('Checking version.'); - await db.verify(layout.V.build(), 'chain', 
5); - const t = bdb.key('t', ['hash256']); - const T = bdb.key('T', ['hash', 'hash256']); - const C = bdb.key('C', ['hash', 'hash256', 'uint32']); + await checkVersion(); + await migrateIndexes(); + await updateVersion(); - await removeKey('hash -> tx', t); - await removeKey('addr -> tx', T); - await removeKey('addr -> coin', C); - - console.log('Compacting database...'); await db.compactRange(); - - console.log('Updating version to %d.', 6); - await db.del(layout.V.build()); - await db.verify(layout.V.build(), 'chain', 6); - await db.close(); })().then(() => { console.log('Migration complete.'); From 9f86dbca281a3e4f959e4dbcc14283c157b2fd53 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Wed, 27 Mar 2019 19:01:24 +0530 Subject: [PATCH 12/40] chaindb: bump to v6 --- lib/blockchain/chaindb.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 9f7a779e8..2ca1448ad 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -60,7 +60,7 @@ class ChainDB { this.logger.info('Opening ChainDB...'); await this.db.open(); - await this.db.verify(layout.V.encode(), 'chain', 5); + await this.db.verify(layout.V.encode(), 'chain', 6); const state = await this.getState(); From aa3f02d585692f2161a1bfd03d516620f16bc5b6 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 25 Mar 2019 19:31:19 -0700 Subject: [PATCH 13/40] test: initial http tests for indexer --- test/indexer-test.js | 112 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 111 insertions(+), 1 deletion(-) diff --git a/test/indexer-test.js b/test/indexer-test.js index 085a7c7d3..bb101533d 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -12,8 +12,10 @@ const MemWallet = require('./util/memwallet'); const TXIndexer = require('../lib/indexer/txindexer'); const AddrIndexer = require('../lib/indexer/addrindexer'); const BlockStore = require('../lib/blockstore/level'); +const FullNode = 
require('../lib/node/fullnode'); const Network = require('../lib/protocol/network'); const network = Network.get('regtest'); +const {NodeClient, WalletClient} = require('bclient'); const workers = new WorkerPool({ enabled: true @@ -58,7 +60,7 @@ const addrindexer = new AddrIndexer({ }); describe('Indexer', function() { - this.timeout(45000); + this.timeout(120000); before(async () => { await blocks.open(); @@ -197,4 +199,112 @@ describe('Indexer', function() { } }); }); + + describe('http', function() { + this.timeout(120000); + + let node, nclient, wclient = null; + + const vectors = [ + // Secret for the vectors: + // cVDJUtDjdaM25yNVVDLLX3hcHUfth4c7tY3rSc4hy9e8ibtCuj6G + // {addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', amount: 19.99}, + {addr: 'muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi', amount: 1.99} + ]; + + const txids = []; + + const ports = { + p2p: 49331, + node: 49332, + wallet: 49333 + }; + + before(async () => { + this.timeout(120000); + + // Setup a testing node with txindex and addrindex + // both enabled. + node = new FullNode({ + network: 'regtest', + apiKey: 'foo', + walletAuth: true, + memory: true, + workers: true, + indexTX: true, + indexAddress: true, + port: ports.p2p, + httpPort: ports.node, + plugins: [require('../lib/wallet/plugin')], + env: { + 'BCOIN_WALLET_HTTP_PORT': ports.wallet.toString() + } + }); + + await node.open(); + + // Setup the node client to make calls to the node + // to generate blocks and other tasks. + nclient = new NodeClient({ + port: ports.node, + apiKey: 'foo', + timeout: 120000 + }); + + await nclient.open(); + + // Setup a test wallet to generate transactions for + // testing various scenarios. + wclient = new WalletClient({ + port: ports.wallet, + apiKey: 'foo', + timeout: 120000 + }); + + await wclient.open(); + + // Generate initial set of transactions and + // send the coinbase to alice. 
+ const coinbase = await wclient.execute( + 'getnewaddress', ['default']); + + const blocks = await nclient.execute( + 'generatetoaddress', [120, coinbase]); + + assert.equal(blocks.length, 120); + + // Send to the vector addresses for several blocks. + for (let i = 0; i < 10; i++) { + for (const v of vectors) { + const txid = await wclient.execute( + 'sendtoaddress', [v.addr, v.amount]); + + txids.push(txid); + } + + const blocks = await nclient.execute( + 'generatetoaddress', [1, coinbase]); + + assert.equal(blocks.length, 1); + } + }); + + after(async () => { + await nclient.close(); + await wclient.close(); + await node.close(); + }); + + it('will get txs by address', async () => { + for (const v of vectors) { + const res = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {}); + + assert.equal(res.length, 10); + + for (const tx of res) + assert(txids.includes(tx.hash)); + } + }); + }); }); From e2a6a92ebc14dd43c97a39d391a477e093435c3d Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Thu, 28 Mar 2019 16:58:51 +0530 Subject: [PATCH 14/40] addrindexer: index by address prefix --- lib/indexer/addrindexer.js | 46 ++++++++++++++++++++++---------------- test/indexer-test.js | 2 +- 2 files changed, 28 insertions(+), 20 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index a9c5d5939..1f7d13fb2 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -16,8 +16,10 @@ const Indexer = require('./indexer'); /* * AddrIndexer Database Layout: - * A[addr-hash][height][index][hash] -> dummy (tx by address, height and index) - * a[addr-hash][hash] -> (tx height and index by address and tx hash) + * A[addr-prefix][addr-hash][height][index][hash] -> + * dummy (tx by address, height and index) + * a[addr-prefix][addr-hash][hash] -> + * (tx height and index by address and tx hash) * * The database layout is organized so that transactions are sorted in * the same order as the blocks (e.g. 
chronological order) using the block @@ -32,8 +34,8 @@ const Indexer = require('./indexer'); */ Object.assign(layout, { - A: bdb.key('A', ['hash', 'uint32', 'uint32', 'hash256']), - a: bdb.key('a', ['hash', 'hash256']) + A: bdb.key('A', ['uint8', 'hash', 'uint32', 'uint32', 'hash256']), + a: bdb.key('a', ['uint8', 'hash', 'hash256']) }); /** @@ -132,11 +134,13 @@ class AddrIndexer extends Indexer { const tx = block.txs[i]; const hash = tx.hash(); - for (const addr of tx.getHashes(view)) { + for (const addr of tx.getAddresses(view)) { + const prefix = addr.getPrefix(); + const addrHash = addr.getHash(); const count = new Count(height, i); - b.put(layout.A.encode(addr, height, i, hash), null); - b.put(layout.a.encode(addr, hash), count.toRaw()); + b.put(layout.A.encode(prefix, addrHash, height, i, hash), null); + b.put(layout.a.encode(prefix, addrHash, hash), count.toRaw()); } } @@ -159,9 +163,11 @@ class AddrIndexer extends Indexer { const tx = block.txs[i]; const hash = tx.hash(); - for (const addr of tx.getHashes(view)) { - b.del(layout.A.encode(addr, height, i, hash)); - b.del(layout.a.encode(addr, hash)); + for (const addr of tx.getAddresses(view)) { + const prefix = addr.getPrefix(); + const addrHash = addr.getHash(); + b.del(layout.A.encode(prefix, addrHash, height, i, hash)); + b.del(layout.a.encode(prefix, addrHash, hash)); } } @@ -190,14 +196,15 @@ class AddrIndexer extends Indexer { throw new Error('Limit above max of ${this.maxTxs}.'); const hash = Address.getHash(addr); + const prefix = addr.getPrefix(); await this.db.keys({ - gte: layout.A.min(hash), - lte: layout.A.max(hash), + gte: layout.A.min(prefix, hash), + lte: layout.A.max(prefix, hash), limit, reverse, parse: (key) => { - const [,,, txid] = layout.A.decode(key); + const [,,,, txid] = layout.A.decode(key); set.add(txid); } }); @@ -220,6 +227,7 @@ class AddrIndexer extends Indexer { const set = new BufferSet(); const hash = Address.getHash(addr); + const prefix = addr.getPrefix(); const {txid, 
reverse} = options; let {limit} = options; @@ -230,7 +238,7 @@ class AddrIndexer extends Indexer { if (limit > this.maxTxs) throw new Error('Limit above max of ${this.maxTxs}.'); - const raw = await this.db.get(layout.a.encode(hash, txid)); + const raw = await this.db.get(layout.a.encode(prefix, hash, txid)); if (!raw) return []; @@ -242,17 +250,17 @@ class AddrIndexer extends Indexer { limit, reverse, parse: (key) => { - const [,,, txid] = layout.A.decode(key); + const [,,,, txid] = layout.A.decode(key); set.add(txid); } }; if (!reverse) { - opts.gt = layout.A.min(hash, height, index, txid); - opts.lte = layout.A.max(hash); + opts.gt = layout.A.min(prefix, hash, height, index, txid); + opts.lte = layout.A.max(prefix, hash); } else { - opts.gte = layout.A.min(hash); - opts.lt = layout.A.max(hash, height, index, txid); + opts.gte = layout.A.min(prefix, hash); + opts.lt = layout.A.max(prefix, hash, height, index, txid); } await this.db.keys(opts); diff --git a/test/indexer-test.js b/test/indexer-test.js index bb101533d..90f3cc65f 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -208,7 +208,7 @@ describe('Indexer', function() { const vectors = [ // Secret for the vectors: // cVDJUtDjdaM25yNVVDLLX3hcHUfth4c7tY3rSc4hy9e8ibtCuj6G - // {addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', amount: 19.99}, + {addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', amount: 19.99}, {addr: 'muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi', amount: 1.99} ]; From 93c6ff845e63a15aaabc73455985af72246eb5cb Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 5 Apr 2019 17:11:18 -0700 Subject: [PATCH 15/40] indexer: fix, simplify and rewrite indexer base - Write indexer state and index atomically. - Simplify chain state with current height. - Roll forward to best height. - Synchronize the index with the chain with common method `sync` for the events 'connect', 'disconnect' and 'reset'. This will prevent any of the events from conflicting with each other. 
- Fix the chain rollback and reset. - Make sure blocks are connected in the correct order. - Improve logging to log similar to chain. --- lib/indexer/addrindexer.js | 22 +- lib/indexer/indexer.js | 672 +++++++++++++++---------------------- lib/indexer/records.js | 69 ---- lib/indexer/txindexer.js | 12 +- lib/node/fullnode.js | 6 + test/indexer-test.js | 16 +- 6 files changed, 305 insertions(+), 492 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 1f7d13fb2..0572a88be 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -127,7 +127,6 @@ class AddrIndexer extends Indexer { */ async indexBlock(entry, block, view) { - const b = this.db.batch(); const height = entry.height; for (let i = 0; i < block.txs.length; i++) { @@ -136,15 +135,17 @@ class AddrIndexer extends Indexer { for (const addr of tx.getAddresses(view)) { const prefix = addr.getPrefix(); + + if (prefix < 0) + continue; + const addrHash = addr.getHash(); const count = new Count(height, i); - b.put(layout.A.encode(prefix, addrHash, height, i, hash), null); - b.put(layout.a.encode(prefix, addrHash, hash), count.toRaw()); + this.put(layout.A.encode(prefix, addrHash, height, i, hash), null); + this.put(layout.a.encode(prefix, addrHash, hash), count.toRaw()); } } - - return b.write(); } /** @@ -156,7 +157,6 @@ class AddrIndexer extends Indexer { */ async unindexBlock(entry, block, view) { - const b = this.db.batch(); const height = entry.height; for (let i = 0; i < block.txs.length; i++) { @@ -165,13 +165,15 @@ class AddrIndexer extends Indexer { for (const addr of tx.getAddresses(view)) { const prefix = addr.getPrefix(); + + if (prefix < 0) + continue; + const addrHash = addr.getHash(); - b.del(layout.A.encode(prefix, addrHash, height, i, hash)); - b.del(layout.a.encode(prefix, addrHash, hash)); + this.del(layout.A.encode(prefix, addrHash, height, i, hash)); + this.del(layout.a.encode(prefix, addrHash, hash)); } } - - return b.write(); } /** diff 
--git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index dc7a5c35c..702d5a4ad 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -9,17 +9,13 @@ const assert = require('assert'); const path = require('path'); const fs = require('bfile'); +const bio = require('bufio'); const EventEmitter = require('events'); -const {Lock} = require('bmutex'); const Logger = require('blgr'); const Network = require('../protocol/network'); +const util = require('../utils/util'); const layout = require('./layout'); -const records = require('./records'); - -const { - ChainState, - BlockMeta -} = records; +const {BlockMeta} = require('./records'); /** * Indexer @@ -27,13 +23,12 @@ const { * @extends EventEmitter * @property {IndexerDB} db * @property {Number} height - * @property {ChainState} state * @emits Indexer#chain tip */ class Indexer extends EventEmitter { /** - * Create a index db. + * Create an indexer. * @constructor * @param {String} module * @param {Object} options @@ -53,103 +48,72 @@ class Indexer extends EventEmitter { this.chain = this.options.chain; this.db = null; - this.rescanning = false; - - this.state = new ChainState(); + this.batch = null; + this.syncing = false; this.height = 0; - - this.lock = new Lock(); } /** - * Bind to chain events. - * @private + * Start a new batch write. + * @returns {Batch} */ - bind() { - this.chain.on('connect', async (entry, block, view) => { - if (this.rescanning) - return; + start() { + assert(this.batch === null, 'Already started.'); + this.batch = this.db.batch(); + return this.batch; + } - try { - await this.addBlock(entry, block, view); - } catch (e) { - this.emit('error', e); - } - }); + /** + * Put key and value to the current batch. 
+ * @param {String} key + * @param {Buffer} value + */ - this.chain.on('disconnect', async (entry, block, view) => { - if (this.rescanning) - return; + put(key, value) { + this.batch.put(key, value); + } - try { - await this.removeBlock(entry, block, view); - } catch (e) { - this.emit('error', e); - } - }); + /** + * Delete key from the current batch. + * @param {String} key + */ - this.chain.on('reset', async (tip) => { - try { - await this.resetChain(tip); - } catch (e) { - this.emit('error', e); - } - }); + del(key) { + this.batch.del(key); } /** - * Ensure prefix directory (prefix/index). + * Commit the current batch. * @returns {Promise} */ - async ensure() { - if (fs.unsupported) - return undefined; - - if (this.options.memory) - return undefined; - - return fs.mkdirp(this.options.prefix); + async commit() { + await this.batch.write(); + this.batch = null; } /** - * Open the indexdb, wait for the database to load. + * Open the indexer, open the database, + * initialize height, and bind to events. * @returns {Promise} */ async open() { + this.logger.info('Indexer is loading.'); + await this.ensure(); await this.db.open(); await this.db.verify(layout.V.encode(), 'index', 0); - await this.verifyNetwork(); - this.bind(); - - await this.sync(); - } - - /** - * Verify network. - * @returns {Promise} - */ - - async verifyNetwork() { - const raw = await this.db.get(layout.O.encode()); - - if (!raw) { - const b = this.db.batch(); - b.put(layout.O.encode(), fromU32(this.network.magic)); - return b.write(); - } - - const magic = raw.readUInt32LE(0, true); - - if (magic !== this.network.magic) - throw new Error('Network mismatch for Indexer.'); + // Initialize the indexed height. + const data = await this.db.get(layout.R.encode()); + if (data) + this.height = bio.readU32(data, 0); - return undefined; + // Bind to chain events. + this.bind(); } /** @@ -162,68 +126,69 @@ class Indexer extends EventEmitter { } /** - * Sync state with server on every connect. 
+ * Ensure prefix directory (prefix/index). * @returns {Promise} */ - async sync() { - const unlock = await this.lock.lock(); - try { - this.logger.info('Resyncing from server...'); - await this.syncState(); - await this.syncChain(); - } finally { - unlock(); - } + async ensure() { + if (fs.unsupported) + return; + + if (this.options.memory) + return; + + await fs.mkdirp(this.options.prefix); } /** - * Initialize and write initial sync state. + * Verify network of index. * @returns {Promise} */ - async syncState() { - const cache = await this.getState(); - - if (cache) { - this.state = cache; - this.height = cache.height; - - this.logger.info( - 'Indexer loaded (height=%d, start=%d).', - this.state.height, - this.state.startHeight); - return undefined; - } - - this.logger.info('Initializing database state from server.'); - - const b = this.db.batch(); - const hashes = await this.chain.getHashes(); - - let tip = null; + async verifyNetwork() { + let raw = await this.db.get(layout.O.encode()); - for (let height = 0; height < hashes.length; height++) { - const hash = hashes[height]; - const meta = new BlockMeta(hash, height); - b.put(layout.h.encode(height), meta.toHash()); - tip = meta; + if (!raw) { + raw = bio.write(4).writeU32(this.network.magic).render(); + await this.db.put(layout.O.encode(), raw); + return; } - assert(tip); + const magic = bio.readU32(raw, 0); - const state = this.state.clone(); - state.startHeight = 0; - state.height = tip.height; + if (magic !== this.network.magic) + throw new Error('Indexer: Network mismatch.'); + } - b.put(layout.R.encode(), state.toRaw()); + /** + * Bind to chain events. 
+ * @private + */ - await b.write(); + bind() { + this.chain.on('connect', async (entry, block, view) => { + try { + await this.sync(entry, block, view); + } catch (e) { + this.emit('error', e); + } + }); - this.state = state; - this.height = state.height; + this.chain.on('disconnect', async (entry, block, view) => { + try { + await this.sync(entry, block, view); + } catch (e) { + this.emit('error', e); + } + }); - return undefined; + this.chain.on('reset', async (tip) => { + try { + await this.sync(tip); + } catch (e) { + this.emit('error', e); + } + }); } /** @@ -245,244 +210,176 @@ class Indexer extends EventEmitter { } /** - * Connect and sync with the chain server. - * @private + * Get a index block meta. + * @param {Hash} hash * @returns {Promise} */ - async syncChain() { - let height = this.state.height; - - this.logger.info('Syncing state from height %d.', height); - - // A re-org when we're offline might - // leave chain in a different state. - // Scan chain backwards until we - // find a known 'good' height. - for (;;) { - const tip = await this.getBlock(height); - assert(tip); - - if (await this.getEntry(tip.hash)) - break; + async getBlockMeta(height) { + const data = await this.db.get(layout.h.encode(height)); - assert(height !== 0); - height -= 1; - } + if (!data) + return null; - // Start scan from last indexed OR - // last known 'good' height whichever - // is lower, because `scan` scans from - // low to high blocks - if (this.state.startHeight < height) - height = this.state.startHeight; + const block = new BlockMeta(); + block.hash = data; + block.height = height; - return this._rescan(height); + return block; } /** - * Rescan a block. - * @private + * Sync with the chain. 
* @param {ChainEntry} entry - * @param {TX[]} txs + * @param {Block} block + * @param {CoinView} view * @returns {Promise} */ - async rescanBlock(entry, block, view) { - this.logger.spam('Rescanning block: %d.', entry.height); - - if (!this.rescanning) { - this.logger.warning('Unsolicited rescan block: %d.', entry.height); + async sync(entry, block, view) { + if (this.syncing) return; - } - if (entry.height % 1000 === 0) - this.logger.debug('Rescanned block: %d.', entry.height); + this.syncing = true; - if (entry.height > this.state.height + 1) { - this.logger.warning('Rescan block too high: %d.', entry.height); - return; - } + const connected = await this._syncBlock(entry, block, view); - try { - await this._addBlock(entry, block, view); - } catch (e) { - this.emit('error', e); - throw e; + if (connected) { + this.syncing = false; + } else { + (async () => { + await this._syncChain(entry); + this.syncing = false; + })(); } } /** - * Force a rescan. - * @param {Number} height + * Sync with the chain with a block. + * @private + * @param {ChainEntry} entry + * @param {Block} block + * @param {CoinView} view * @returns {Promise} */ - async rescan(height) { - const unlock = await this.lock.lock(); - try { - return await this._rescan(height); - } finally { - unlock(); + async _syncBlock(entry, block, view) { + // In the case that the next block is being + // connected or the current block disconnected + // use the block and view being passed directly, + // instead of reading that information again. + if (entry && block && view) { + if (entry.height === this.height + 1) { + await this._addBlock(entry, block, view); + return true; + } else if (entry.height === this.height) { + await this._removeBlock(entry, block, view); + return true; + } } + return false; } /** - * Rescan blockchain from a given height. + * Sync with the chain. 
* @private - * @param {Number} height + * @param {ChainEntry} entry * @returns {Promise} */ - async _rescan(height) { - assert((height >>> 0) === height, 'Must pass in a height.'); - - await this.rollback(height); + async _syncChain(entry) { + let height = this.height; - const tip = this.state.height; - - this.logger.debug('Rescanning from %d to %d', height, tip); + // In the case that the indexer has never + // started, sync to the best height. + if (!height) { + await this._rollforward(); + return; + } - this.rescanning = true; + // Check for a re-org that might + // leave chain in a different state. + // Scan chain backwards until we + // find a common height. + for (;;) { + const tip = await this.getBlockMeta(height); + assert(tip); - for (let i = height; ; i++) { - const entry = await this.chain.getEntry(i); - if (!entry) + if (await this.getEntry(tip.hash)) break; - const block = await this.chain.getBlock(entry.hash); - assert(block); - - const view = await this.chain.getBlockView(block); - assert(view); - - await this.rescanBlock(entry, block, view); + assert(height !== 0); + height -= 1; } - this.rescanning = false; - } - - /** - * Get the best block hash. - * @returns {Promise} - */ - - async getState() { - const data = await this.db.get(layout.R.encode()); - - if (!data) - return null; - - return ChainState.fromRaw(data); - } - - /** - * Sync the current chain state to tip. - * @param {BlockMeta} tip - * @returns {Promise} - */ - - async setTip(tip) { - const b = this.db.batch(); - const state = this.state.clone(); - - if (tip.height < state.height) { - // Hashes ahead of our new tip - // that we need to delete. - while (state.height !== tip.height) { - b.del(layout.h.encode(state.height)); - state.height -= 1; - } - } else if (tip.height > state.height) { - assert(tip.height === state.height + 1, 'Bad chain sync.'); - state.height += 1; + // In the case that the chain is reset + // the entry will be less than the + // current height. 
+ if (entry && entry.height < height) + height = entry.height; + + if (height < this.height) { + await this._rollback(height); + await this._rollforward(); + } else { + await this._rollforward(); } - - state.startHeight = tip.height; - - // Save tip and state. - b.put(layout.h.encode(tip.height), tip.toHash()); - b.put(layout.R.encode(), state.toRaw()); - - await b.write(); - - this.state = state; - this.height = state.height; } /** - * Get a index block meta. - * @param {Hash} hash + * Scan blockchain to the best chain height. + * @private * @returns {Promise} */ - async getBlock(height) { - const data = await this.db.get(layout.h.encode(height)); - - if (!data) - return null; - - const block = new BlockMeta(); - block.hash = data; - block.height = height; - - return block; - } + async _rollforward() { + this.logger.info('Indexing to best height.'); - /** - * Get index tip. - * @param {Hash} hash - * @returns {Promise} - */ + for (let i = this.height + 1; ; i++) { + const entry = await this.getEntry(i); + if (!entry) + break; - async getTip() { - const tip = await this.getBlock(this.state.height); + const block = await this.chain.getBlock(entry.hash); + assert(block); - if (!tip) - throw new Error('Indexer: Tip not found!'); + const view = await this.chain.getBlockView(block); + assert(view); - return tip; + await this._addBlock(entry, block, view); + } } /** - * Sync with chain height. + * Rollback to a given chain height. 
* @param {Number} height * @returns {Promise} */ - async rollback(height) { - if (height > this.state.height) - throw new Error('Indexer: Cannot rollback to the future.'); - - if (height === this.state.height) { - this.logger.info('Rolled back to same height (%d).', height); + async _rollback(height) { + if (height > this.height) { + this.logger.warning( + 'Ignoring rollback to future height (%d).', + height); return; } - this.logger.info( - 'Rolling back %d Indexer blocks to height %d.', - this.state.height - height, height); + this.logger.info('Rolling back to height %d.', height); - const tip = await this.getBlock(height); - assert(tip); + while (this.height > height) { + const tip = await this.getBlockMeta(this.height); + assert(tip); - await this.revert(tip.height); - await this.setTip(tip); - } + const entry = await this.chain.getEntry(tip.hash); + assert(entry); - /** - * Add a block's transactions and write the new best hash. - * @param {ChainEntry} entry - * @param {Block} block - * @returns {Promise} - */ + const block = await this.chain.getBlock(entry.hash); + assert(block); - async addBlock(entry, block, view) { - const unlock = await this.lock.lock(); - try { - return await this._addBlock(entry, block, view); - } finally { - unlock(); + const view = await this.chain.getBlockView(block); + assert(view); + + await this._removeBlock(entry, block, view); } } @@ -497,34 +394,31 @@ class Indexer extends EventEmitter { async _addBlock(entry, block, view) { assert(block.hasRaw(), 'Expected raw data for block.'); - const tip = BlockMeta.fromEntry(entry); + const start = util.bench(); - if (tip.height >= this.network.block.slowHeight && !this.rescanning) - this.logger.debug('Adding block: %d.', tip.height); - - this.logger.spam('Adding block: %d.', entry.height); - - if (tip.height === this.state.height) { - // We let blocks of the same height - // through specifically for rescans: - // we always want to rescan the last - // block since the state may have - // 
updated before the block was fully - // processed (in the case of a crash). - this.logger.warning('Already saw Indexer block (%d).', tip.height); - } else if (tip.height !== this.state.startHeight + 1) { - await this._rescan(this.state.height); - return; - } + if (entry.height !== this.height + 1) + throw new Error('Indexer: Can not add block.'); + + const tip = BlockMeta.fromEntry(entry); - this.logger.spam('Indexing block: %d.', entry.height); + // Start the batch write. + this.start(); + // Call the implemented indexer to add to + // the batch write. await this.indexBlock(entry, block, view); - // Sync the state to the new tip. - await this.setTip(tip); + // Sync the height to the new tip. + const height = await this._setTip(tip); - return; + // Commit the write batch to disk. + await this.commit(); + + // Update height _after_ successful commit. + this.height = height; + + // Log the current indexer status. + this.logStatus(start, block, entry); } /** @@ -551,32 +445,6 @@ class Indexer extends EventEmitter { ; } - /** - * Revert db to an older state. - * @param {Number} target - * @returns {Promise} - */ - - async revert(target) { - ; - } - - /** - * Unconfirm a block's transactions - * and write the new best hash (SPV version). - * @param {ChainEntry} entry - * @returns {Promise} - */ - - async removeBlock(entry, block, view) { - const unlock = await this.lock.lock(); - try { - return await this._removeBlock(entry, block, view); - } finally { - unlock(); - } - } - /** * Unconfirm a block's transactions. 
* @private @@ -585,63 +453,94 @@ class Indexer extends EventEmitter { */ async _removeBlock(entry, block, view) { - const tip = BlockMeta.fromEntry(entry); - - this.logger.spam('Removing block: %d.', entry.height); + const start = util.bench(); - if (tip.height === 0) - throw new Error('Indexer: Bad disconnection (genesis block).'); + if (entry.height !== this.height) + throw new Error('Indexer: Can not remove block.'); - if (tip.height > this.state.height) { - this.logger.warning( - 'Indexer is disconnecting high blocks (%d).', - tip.height); - return; - } - - if (tip.height !== this.state.height) - throw new Error('Indexer: Bad disconnection (height mismatch).'); + const tip = BlockMeta.fromEntry(entry); - this.logger.spam('Unindexing block: %d.', entry.height); + // Start the batch write. + this.start(); + // Call the implemented indexer to add to + // the batch write. await this.unindexBlock(entry, block, view); - const prev = await this.getBlock(tip.height - 1); + const prev = await this.getBlockMeta(tip.height - 1); assert(prev); - // Sync the state to the previous tip. - await this.setTip(prev); + // Sync the height to the previous tip. + const height = await this._setTip(prev); + + // Commit the write batch to disk. + await this.commit(); + + // Update height _after_ successful commit. + this.height = height; - return; + // Log the current indexer status. + this.logStatus(start, block, entry); } /** - * Handle a chain reset. - * @param {ChainEntry} entry + * Update the current height to tip. + * @param {BlockMeta} tip * @returns {Promise} */ - async resetChain(entry) { - const unlock = await this.lock.lock(); - try { - return await this._resetChain(entry); - } finally { - unlock(); + async _setTip(tip) { + if (tip.height < this.height) { + assert(tip.height === this.height - 1); + this.del(layout.h.encode(this.height)); + } else if (tip.height > this.height) { + assert(tip.height === this.height + 1); } + + // Add to batch write to save tip and height. 
+ this.put(layout.h.encode(tip.height), tip.toHash()); + + const raw = bio.write(4).writeU32(tip.height).render(); + this.put(layout.R.encode(), raw); + + return tip.height; + } + + /** + * Test whether the indexer has reached its slow height. + * @private + * @returns {Boolean} + */ + + isSlow() { + if (this.height === 1 || this.height % 20 === 0) + return true; + + if (this.height >= this.network.block.slowHeight) + return true; + + return false; } /** - * Handle a chain reset without a lock. + * Log the current indexer status. * @private + * @param {Array} start + * @param {Block} block * @param {ChainEntry} entry - * @returns {Promise} */ - async _resetChain(entry) { - if (entry.height > this.state.height) - throw new Error('Indexer: Bad reset height.'); + logStatus(start, block, entry) { + if (!this.isSlow()) + return; + + const elapsed = util.bench(start); - return this.rollback(entry.height); + this.logger.info( + 'Block (%d) added to indexer (txs=%d time=%d).', + entry.height, + block.txs.length, + elapsed); } } @@ -664,7 +563,6 @@ class IndexOptions { this.logger = Logger.global; this.blocks = null; this.chain = null; - this.indexers = null; this.prefix = null; this.location = null; @@ -737,7 +635,7 @@ class IndexOptions { } /** - * Instantiate chain options from object. + * Instantiate indexer options from object. 
* @param {Object} options * @returns {IndexOptions} */ @@ -747,22 +645,6 @@ class IndexOptions { } } -/* - * Helpers - */ - -/** - * fromU32 - * read a 4 byte Uint32LE - * @param {Number} num number - * @returns {Buffer} buffer - */ -function fromU32(num) { - const data = Buffer.allocUnsafe(4); - data.writeUInt32LE(num, 0, true); - return data; -} - /* * Expose */ diff --git a/lib/indexer/records.js b/lib/indexer/records.js index 3e67e8436..7db64b943 100644 --- a/lib/indexer/records.js +++ b/lib/indexer/records.js @@ -14,74 +14,6 @@ const bio = require('bufio'); const util = require('../utils/util'); const consensus = require('../protocol/consensus'); -/** - * Chain State - * @alias module:indexer.ChainState - */ - -class ChainState { - /** - * Create a chain state. - * @constructor - */ - - constructor() { - this.startHeight = 0; - this.height = 0; - } - - /** - * Clone the state. - * @returns {ChainState} - */ - - clone() { - const state = new ChainState(); - state.startHeight = this.startHeight; - state.height = this.height; - return state; - } - - /** - * Inject properties from serialized data. - * @private - * @param {Buffer} data - */ - - fromRaw(data) { - const br = bio.read(data); - - this.startHeight = br.readU32(); - this.height = br.readU32(); - - return this; - } - - /** - * Instantiate chain state from serialized data. - * @param {Buffer} data - * @returns {ChainState} - */ - - static fromRaw(data) { - return new this().fromRaw(data); - } - - /** - * Serialize the chain state. 
- * @returns {Buffer} - */ - - toRaw() { - const bw = bio.write(8); - - bw.writeU32(this.startHeight); - bw.writeU32(this.height); - - return bw.render(); - } -} - /** * Block Meta * @alias module:indexer.BlockMeta @@ -215,7 +147,6 @@ class BlockMeta { * Expose */ -exports.ChainState = ChainState; exports.BlockMeta = BlockMeta; module.exports = exports; diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index 8821fc307..82746f511 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -129,8 +129,6 @@ class TXIndexer extends Indexer { */ async indexBlock(entry, block, view) { - const b = this.db.batch(); - for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; @@ -146,10 +144,8 @@ class TXIndexer extends Indexer { length: size }); - b.put(layout.t.encode(hash), txrecord.toRaw()); + this.put(layout.t.encode(hash), txrecord.toRaw()); } - - return b.write(); } /** @@ -161,15 +157,11 @@ class TXIndexer extends Indexer { */ async unindexBlock(entry, block, view) { - const b = this.db.batch(); - for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; const hash = tx.hash(); - b.del(layout.t.encode(hash)); + this.del(layout.t.encode(hash)); } - - return b.write(); } /** diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 4e6747d40..73e973f87 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -410,6 +410,12 @@ class FullNode extends Node { */ startSync() { + if (this.txindex) + this.txindex.sync(); + + if (this.addrindex) + this.addrindex.sync(); + return this.pool.startSync(); } diff --git a/test/indexer-test.js b/test/indexer-test.js index 90f3cc65f..839bc3cdf 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -90,8 +90,8 @@ describe('Indexer', function() { } assert.strictEqual(chain.height, 10); - assert.strictEqual(txindexer.state.startHeight, 10); - assert.strictEqual(addrindexer.state.startHeight, 10); + assert.strictEqual(txindexer.height, 10); + 
assert.strictEqual(addrindexer.height, 10); }); it('should get txs by address', async () => { @@ -172,8 +172,8 @@ describe('Indexer', function() { } assert.strictEqual(chain.height, 20); - assert.strictEqual(txindexer.state.startHeight, 20); - assert.strictEqual(addrindexer.state.startHeight, 20); + assert.strictEqual(txindexer.height, 20); + assert.strictEqual(addrindexer.height, 20); const hashes = await addrindexer.getHashesByAddress(miner.getAddress()); assert.strictEqual(hashes.length, 20); @@ -187,8 +187,8 @@ describe('Indexer', function() { it('should handle indexing a reorg', async () => { await reorg(chain, cpu, 10); - assert.strictEqual(txindexer.state.startHeight, 31); - assert.strictEqual(addrindexer.state.startHeight, 31); + assert.strictEqual(txindexer.height, 31); + assert.strictEqual(addrindexer.height, 31); const hashes = await addrindexer.getHashesByAddress(miner.getAddress()); assert.strictEqual(hashes.length, 31); @@ -269,9 +269,9 @@ describe('Indexer', function() { 'getnewaddress', ['default']); const blocks = await nclient.execute( - 'generatetoaddress', [120, coinbase]); + 'generatetoaddress', [150, coinbase]); - assert.equal(blocks.length, 120); + assert.equal(blocks.length, 150); // Send to the vector addresses for several blocks. 
for (let i = 0; i < 10; i++) { From ebc40a58d0cab0a23432084516f1bd401061d049 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 15 Apr 2019 14:54:18 -0700 Subject: [PATCH 16/40] indexer: trim disk usage for tx and addr indexes --- lib/indexer/addrindexer.js | 87 +++++++++++++++++----------- lib/indexer/txindexer.js | 116 ++++++++++++++++++++++++++++++------- 2 files changed, 148 insertions(+), 55 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 0572a88be..7b91a4205 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -9,33 +9,37 @@ const assert = require('assert'); const bdb = require('bdb'); const bio = require('bufio'); -const {BufferSet} = require('buffer-map'); const layout = require('./layout'); const Address = require('../primitives/address'); const Indexer = require('./indexer'); /* * AddrIndexer Database Layout: - * A[addr-prefix][addr-hash][height][index][hash] -> - * dummy (tx by address, height and index) - * a[addr-prefix][addr-hash][hash] -> - * (tx height and index by address and tx hash) + * A[addr-prefix][addr-hash][height][index] -> dummy (tx by address) + * C[height][index] -> hash (tx hash by height and index) + * c[hash]-> height + index (tx height and index by hash) * - * The database layout is organized so that transactions are sorted in - * the same order as the blocks (e.g. chronological order) using the block - * height and transaction index. This provides the ability to query for - * sets of transactions within that order. For a wallet that would like to - * synchronize or rescan, this could be a query for all of the latest - * transactions, but not for earlier transactions that are already known. - * Furthermore, to be able to query for all transactions in multiple sets - * without reference to height and index, there is a mapping from address - * and tx hash to the height and index as an entry point to the - * ordered transactions. 
+ * The database layout is organized so that transactions are + * sorted in the same order as the blocks using the block height + * and transaction index. This provides the ability to query for + * sets of transactions within that order. For a wallet that would + * like to synchronize or rescan, this could be a query for all of + * the latest transactions, but not for earlier transactions that + * are already known. + * + * To be able to query for all transactions in multiple sets without + * reference to height and index, there is a mapping from tx hash to + * the height and index as an entry point. + * + * A mapping of height and index is kept for each transaction + * hash so that the tx hash is not repeated for every address within + * a transaction. */ Object.assign(layout, { - A: bdb.key('A', ['uint8', 'hash', 'uint32', 'uint32', 'hash256']), - a: bdb.key('a', ['uint8', 'hash', 'hash256']) + A: bdb.key('A', ['uint8', 'hash', 'uint32', 'uint32']), + C: bdb.key('C', ['uint32', 'uint32']), + c: bdb.key('c', ['hash256']) }); /** @@ -50,7 +54,7 @@ class Count { * @param {Number} index */ - constructor(height, index, coin) { + constructor(height, index) { this.height = height || 0; this.index = index || 0; @@ -132,6 +136,10 @@ class AddrIndexer extends Indexer { for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; const hash = tx.hash(); + const count = new Count(height, i); + + this.put(layout.C.encode(height, i), hash); + this.put(layout.c.encode(hash), count.toRaw()); for (const addr of tx.getAddresses(view)) { const prefix = addr.getPrefix(); @@ -140,10 +148,8 @@ class AddrIndexer extends Indexer { continue; const addrHash = addr.getHash(); - const count = new Count(height, i); - this.put(layout.A.encode(prefix, addrHash, height, i, hash), null); - this.put(layout.a.encode(prefix, addrHash, hash), count.toRaw()); + this.put(layout.A.encode(prefix, addrHash, height, i), null); } } } @@ -163,6 +169,9 @@ class AddrIndexer extends Indexer { const tx 
= block.txs[i]; const hash = tx.hash(); + this.del(layout.C.encode(height, i)); + this.del(layout.c.encode(hash)); + for (const addr of tx.getAddresses(view)) { const prefix = addr.getPrefix(); @@ -170,8 +179,8 @@ class AddrIndexer extends Indexer { continue; const addrHash = addr.getHash(); - this.del(layout.A.encode(prefix, addrHash, height, i, hash)); - this.del(layout.a.encode(prefix, addrHash, hash)); + + this.del(layout.A.encode(prefix, addrHash, height, i)); } } } @@ -186,7 +195,7 @@ class AddrIndexer extends Indexer { */ async getHashesByAddress(addr, options = {}) { - const set = new BufferSet(); + const txs = []; const {reverse} = options; let {limit} = options; @@ -206,12 +215,17 @@ class AddrIndexer extends Indexer { limit, reverse, parse: (key) => { - const [,,,, txid] = layout.A.decode(key); - set.add(txid); + const [,, height, index] = layout.A.decode(key); + txs.push([height, index]); } }); - return set.toArray(); + const hashes = []; + + for (const [height, index] of txs) + hashes.push(await this.db.get(layout.C.encode(height, index))); + + return hashes; } /** @@ -226,7 +240,7 @@ class AddrIndexer extends Indexer { */ async getHashesByAddressAfter(addr, options = {}) { - const set = new BufferSet(); + const txs = []; const hash = Address.getHash(addr); const prefix = addr.getPrefix(); @@ -240,7 +254,7 @@ class AddrIndexer extends Indexer { if (limit > this.maxTxs) throw new Error('Limit above max of ${this.maxTxs}.'); - const raw = await this.db.get(layout.a.encode(prefix, hash, txid)); + const raw = await this.db.get(layout.c.encode(txid)); if (!raw) return []; @@ -252,22 +266,27 @@ class AddrIndexer extends Indexer { limit, reverse, parse: (key) => { - const [,,,, txid] = layout.A.decode(key); - set.add(txid); + const [,, height, index] = layout.A.decode(key); + txs.push([height, index]); } }; if (!reverse) { - opts.gt = layout.A.min(prefix, hash, height, index, txid); + opts.gt = layout.A.min(prefix, hash, height, index); opts.lte = 
layout.A.max(prefix, hash); } else { opts.gte = layout.A.min(prefix, hash); - opts.lt = layout.A.max(prefix, hash, height, index, txid); + opts.lt = layout.A.max(prefix, hash, height, index); } await this.db.keys(opts); - return set.toArray(); + const hashes = []; + + for (const [height, index] of txs) + hashes.push(await this.db.get(layout.C.encode(height, index))); + + return hashes; } } diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index 82746f511..15e73f534 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -17,18 +17,26 @@ const Indexer = require('./indexer'); /* * TXIndexer Database Layout: - * t[hash] -> extended tx -*/ + * t[hash] -> tx record + * b[height] -> block record + * + * The transaction index maps a transaction to a block + * and an index, offset, and length within that block. The + * block hash is stored in a separate record by height so that + * the 32 byte hash is not repeated for every transaction + * within a block. + */ Object.assign(layout, { - t: bdb.key('t', ['hash256']) + t: bdb.key('t', ['hash256']), + b: bdb.key('b', ['uint32']) }); /** - * Transaction Record + * Block Record */ -class TxRecord { +class BlockRecord { /** * Create a block record. * @constructor @@ -36,14 +44,70 @@ class TxRecord { constructor(options = {}) { this.block = options.block || consensus.ZERO_HASH; - this.height = options.height || 0; this.time = options.time || 0; + + assert(this.block.length === 32); + assert((this.time >>> 0) === this.time); + } + + /** + * Inject properties from serialized data. + * @private + * @param {Buffer} data + */ + + fromRaw(data) { + const br = bio.read(data); + + this.block = br.readHash(); + this.time = br.readU32(); + + return this; + } + + /** + * Instantiate block record from serialized data. + * @param {Hash} hash + * @param {Buffer} data + * @returns {BlockRecord} + */ + + static fromRaw(data) { + return new this().fromRaw(data); + } + + /** + * Serialize the block record. 
+ * @returns {Buffer} + */ + + toRaw() { + const bw = bio.write(36); + + bw.writeHash(this.block); + bw.writeU32(this.time); + + return bw.render(); + } +} + +/** + * Transaction Record + */ + +class TxRecord { + /** + * Create a transaction record. + * @constructor + */ + + constructor(options = {}) { + this.height = options.height || 0; this.index = options.index || 0; this.offset = options.offset || 0; this.length = options.length || 0; assert((this.height >>> 0) === this.height); - assert((this.time >>> 0) === this.time); assert((this.index >>> 0) === this.index); assert((this.offset >>> 0) === this.offset); assert((this.length >>> 0) === this.length); @@ -58,9 +122,7 @@ class TxRecord { fromRaw(data) { const br = bio.read(data); - this.block = br.readHash(); this.height = br.readU32(); - this.time = br.readU32(); this.index = br.readU32(); if (this.index === 0x7fffffff) this.index = -1; @@ -72,7 +134,7 @@ class TxRecord { } /** - * Instantiate block record from serialized data. + * Instantiate transaction record from serialized data. * @param {Hash} hash * @param {Buffer} data * @returns {BlockRecord} @@ -83,16 +145,14 @@ class TxRecord { } /** - * Serialize the block record. + * Serialize the transaction record. 
* @returns {Buffer} */ toRaw() { - const bw = bio.write(52); + const bw = bio.write(16); - bw.writeHash(this.block); bw.writeU32(this.height); - bw.writeU32(this.time); bw.writeU32(this.index); bw.writeU32(this.offset); bw.writeU32(this.length); @@ -129,6 +189,13 @@ class TXIndexer extends Indexer { */ async indexBlock(entry, block, view) { + const brecord = new BlockRecord({ + block: entry.hash, + time: entry.time + }); + + this.put(layout.b.encode(entry.height), brecord.toRaw()); + for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; @@ -136,9 +203,7 @@ class TXIndexer extends Indexer { const {offset, size} = tx.getPosition(); const txrecord = new TxRecord({ - block: entry.hash, height: entry.height, - time: entry.time, index: i, offset: offset, length: size @@ -157,6 +222,8 @@ class TXIndexer extends Indexer { */ async unindexBlock(entry, block, view) { + this.del(layout.b.encode(entry.height)); + for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; const hash = tx.hash(); @@ -176,17 +243,24 @@ class TXIndexer extends Indexer { return null; const record = TxRecord.fromRaw(raw); - const {block, offset, length} = record; + const {height, index, offset, length} = record; + + const braw = await this.db.get(layout.b.encode(height)); + if (!braw) + return null; + + const brecord = BlockRecord.fromRaw(braw); + const {block, time} = brecord; const data = await this.blocks.read(block, offset, length); const tx = TX.fromRaw(data); const meta = TXMeta.fromTX(tx); - meta.height = record.height; - meta.block = record.block; - meta.time = record.time; - meta.index = record.index; + meta.height = height; + meta.block = block; + meta.time = time; + meta.index = index; return meta; } From 06ef0e3615c434456436230ad388225ee2524684 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 15 Apr 2019 15:56:05 -0700 Subject: [PATCH 17/40] indexer: simplify addrindex query --- lib/indexer/addrindexer.js | 81 +++++++++++--------------------------- 
test/indexer-test.js | 8 ++-- 2 files changed, 26 insertions(+), 63 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 7b91a4205..b61122265 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -186,10 +186,14 @@ class AddrIndexer extends Indexer { } /** - * Get all transaction hashes to an address. + * Get transaction hashes to an address in ascending or descending + * order. If the `after` argument is supplied, results will be given + * _after_ that transaction hash. The default order is ascending from + * oldest to latest. * @param {Address} addr * @param {Object} options - * @param {Boolean} options.limit + * @param {Buffer} options.after - A transaction hash + * @param {Number} options.limit * @param {Boolean} options.reverse * @returns {Promise} - Returns {@link Hash}[]. */ @@ -197,7 +201,7 @@ class AddrIndexer extends Indexer { async getHashesByAddress(addr, options = {}) { const txs = []; - const {reverse} = options; + const {after, reverse} = options; let {limit} = options; if (!limit) @@ -209,74 +213,33 @@ class AddrIndexer extends Indexer { const hash = Address.getHash(addr); const prefix = addr.getPrefix(); - await this.db.keys({ - gte: layout.A.min(prefix, hash), - lte: layout.A.max(prefix, hash), + const opts = { limit, reverse, parse: (key) => { const [,, height, index] = layout.A.decode(key); txs.push([height, index]); } - }); - - const hashes = []; - - for (const [height, index] of txs) - hashes.push(await this.db.get(layout.C.encode(height, index))); - - return hashes; - } - - /** - * Get all transaction hashes to an address after - * a specific txid. - * @param {Address} addr - * @param {Object} options - * @param {Buffer} options.txid - * @param {Boolean} options.limit - * @param {Boolean} options.reverse - * @returns {Promise} - Returns {@link Hash}[]. 
- */ - - async getHashesByAddressAfter(addr, options = {}) { - const txs = []; - - const hash = Address.getHash(addr); - const prefix = addr.getPrefix(); - - const {txid, reverse} = options; - let {limit} = options; - - if (!limit) - limit = this.maxTxs; - - if (limit > this.maxTxs) - throw new Error('Limit above max of ${this.maxTxs}.'); - - const raw = await this.db.get(layout.c.encode(txid)); + }; - if (!raw) - return []; + if (after) { + const raw = await this.db.get(layout.c.encode(after)); + if (!raw) + return []; - const count = Count.fromRaw(raw); - const {height, index} = count; + const count = Count.fromRaw(raw); + const {height, index} = count; - const opts = { - limit, - reverse, - parse: (key) => { - const [,, height, index] = layout.A.decode(key); - txs.push([height, index]); + if (!reverse) { + opts.gt = layout.A.min(prefix, hash, height, index); + opts.lte = layout.A.max(prefix, hash); + } else { + opts.gte = layout.A.min(prefix, hash); + opts.lt = layout.A.max(prefix, hash, height, index); } - }; - - if (!reverse) { - opts.gt = layout.A.min(prefix, hash, height, index); - opts.lte = layout.A.max(prefix, hash); } else { opts.gte = layout.A.min(prefix, hash); - opts.lt = layout.A.max(prefix, hash, height, index); + opts.lte = layout.A.max(prefix, hash); } await this.db.keys(opts); diff --git a/test/indexer-test.js b/test/indexer-test.js index 839bc3cdf..86b9cb9ff 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -129,8 +129,8 @@ describe('Indexer', function() { const txid = hashes[4]; - const next = await addrindexer.getHashesByAddressAfter( - addr, {txid: txid, limit: 5}); + const next = await addrindexer.getHashesByAddress( + addr, {after: txid, limit: 5}); assert.strictEqual(next.length, 5); @@ -149,8 +149,8 @@ describe('Indexer', function() { const txid = hashes[4]; - const next = await addrindexer.getHashesByAddressAfter( - addr, {txid: txid, limit: 5, reverse: true}); + const next = await addrindexer.getHashesByAddress( + addr, 
{after: txid, limit: 5, reverse: true}); assert.strictEqual(next.length, 5); From 4c8f11ed34ab03f021d8710ffd821ae71070fc31 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Mon, 15 Apr 2019 21:25:12 -0700 Subject: [PATCH 18/40] node: update http for addrindex --- lib/node/fullnode.js | 14 +++++--- lib/node/http.js | 16 ++++++++- test/indexer-test.js | 81 ++++++++++++++++++++++++++++++++++++++++---- 3 files changed, 100 insertions(+), 11 deletions(-) diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 73e973f87..72b96b969 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -153,7 +153,8 @@ class FullNode extends Node { port: this.config.uint('http-port'), apiKey: this.config.str('api-key'), noAuth: this.config.bool('no-auth'), - cors: this.config.bool('cors') + cors: this.config.bool('cors'), + maxTxs: this.config.uint('max-txs') }); // Indexers @@ -175,7 +176,8 @@ class FullNode extends Node { blocks: this.blocks, chain: this.chain, memory: this.config.bool('memory'), - prefix: this.config.filter('index').str('prefix') || this.config.prefix + prefix: this.config.filter('index').str('prefix') || this.config.prefix, + maxTxs: this.config.uint('max-txs') }); } @@ -461,14 +463,18 @@ class FullNode extends Node { * Retrieve transactions pertaining to an * address from the mempool or chain database. * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after * @returns {Promise} - Returns {@link TXMeta}[]. 
*/ - async getMetaByAddress(addr) { + async getMetaByAddress(addr, options = {}) { const mempool = this.mempool.getMetaByAddress(addr); if (this.txindex && this.addrindex) { - const hashes = await this.addrindex.getHashesByAddress(addr); + const hashes = await this.addrindex.getHashesByAddress(addr, options); const mtxs = []; for (const hash of hashes) { diff --git a/lib/node/http.js b/lib/node/http.js index 40e80f323..2edd8cb2c 100644 --- a/lib/node/http.js +++ b/lib/node/http.js @@ -193,12 +193,20 @@ class HTTP extends Server { this.get('/tx/address/:address', async (req, res) => { const valid = Validator.fromRequest(req); const address = valid.str('address'); + const limit = valid.uint('limit', this.options.maxTxs); + const reverse = valid.bool('reverse', false); + const after = valid.brhash('after', null); enforce(address, 'Address is required.'); enforce(!this.chain.options.spv, 'Cannot get TX in SPV mode.'); + enforce(limit <= this.options.maxTxs, + `Limit above max of ${this.options.maxTxs}.`); const addr = Address.fromString(address, this.network); - const metas = await this.node.getMetaByAddress(addr); + + const metas = await this.node.getMetaByAddress( + addr, {limit, reverse, after}); + const result = []; for (const meta of metas) { @@ -635,6 +643,7 @@ class HTTPOptions { this.apiHash = sha256.digest(Buffer.from(this.apiKey, 'ascii')); this.noAuth = false; this.cors = false; + this.maxTxs = 100; this.prefix = null; this.host = '127.0.0.1'; @@ -721,6 +730,11 @@ class HTTPOptions { this.certFile = options.certFile; } + if (options.maxTxs != null) { + assert(Number.isSafeInteger(options.maxTxs)); + this.maxTxs = options.maxTxs; + } + // Allow no-auth implicitly // if we're listening locally. 
if (!options.apiKey) { diff --git a/test/indexer-test.js b/test/indexer-test.js index 86b9cb9ff..cd45499c9 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -208,8 +208,16 @@ describe('Indexer', function() { const vectors = [ // Secret for the vectors: // cVDJUtDjdaM25yNVVDLLX3hcHUfth4c7tY3rSc4hy9e8ibtCuj6G - {addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', amount: 19.99}, - {addr: 'muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi', amount: 1.99} + { + addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', + amount: 19.99, + label: 'p2wpkh' + }, + { + addr: 'muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi', + amount: 1.99, + label: 'p2pkh' + } ]; const txids = []; @@ -295,8 +303,8 @@ describe('Indexer', function() { await node.close(); }); - it('will get txs by address', async () => { - for (const v of vectors) { + for (const v of vectors) { + it(`will get txs by ${v.label} address`, async () => { const res = await nclient.request( 'GET', `/tx/address/${v.addr}`, {}); @@ -304,7 +312,68 @@ describe('Indexer', function() { for (const tx of res) assert(txids.includes(tx.hash)); - } - }); + }); + + it(`will get txs by ${v.label} address (limit)`, async () => { + const res = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 3}); + + for (const tx of res) + assert(txids.includes(tx.hash)); + }); + + it(`txs by ${v.label} address (reverse)`, async () => { + const asc = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {reverse: false}); + + const dsc = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {reverse: true}); + + for (let i = 0; i < dsc.length; i++) + assert.equal(asc[i].hash, dsc[dsc.length - i - 1].hash); + }); + + it(`txs by ${v.label} address after txid`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 3}); + assert.strictEqual(one.length, 3); + + const hash = one[2].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 3}); + 
assert.strictEqual(one.length, 3); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 6}); + assert.strictEqual(one.length, 3); + + assert.deepEqual(one.concat(two), all); + }); + + it(`txs by ${v.label} address after txid (reverse)`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 3, reverse: true}); + + assert.strictEqual(one.length, 3); + + const hash = one[2].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {after: hash, limit: 3, reverse: true}); + + assert.strictEqual(one.length, 3); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 6, reverse: true}); + + assert.strictEqual(one.length, 3); + + assert.deepEqual(one.concat(two), all); + }); + } }); }); From bd26dbf32d8827ebaec7c1342855610d60f8a72b Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 16 Apr 2019 11:11:36 -0700 Subject: [PATCH 19/40] mempool: update for addrindex --- lib/indexer/addrindexer.js | 11 +- lib/mempool/mempool.js | 347 +++++++++++++++---------------------- lib/node/fullnode.js | 44 ++++- test/indexer-test.js | 151 ++++++++++++++-- 4 files changed, 322 insertions(+), 231 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index b61122265..6f463c070 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -222,11 +222,14 @@ class AddrIndexer extends Indexer { } }; - if (after) { - const raw = await this.db.get(layout.c.encode(after)); - if (!raw) - return []; + const hasAfter = (after && await this.db.has(layout.c.encode(after))); + const skip = (after && !hasAfter && !reverse); + + if (skip) + return []; + if (after && hasAfter) { + const raw = await this.db.get(layout.c.encode(after)); const count = Count.fromRaw(raw); const {height, index} = count; diff --git a/lib/mempool/mempool.js b/lib/mempool/mempool.js index fb1bcd1e3..7410a5d0e 100644 --- a/lib/mempool/mempool.js +++ 
b/lib/mempool/mempool.js @@ -19,7 +19,6 @@ const policy = require('../protocol/policy'); const util = require('../utils/util'); const random = require('bcrypto/lib/random'); const {VerifyError} = require('../protocol/errors'); -const Address = require('../primitives/address'); const Script = require('../script/script'); const Outpoint = require('../primitives/outpoint'); const TX = require('../primitives/tx'); @@ -73,8 +72,7 @@ class Mempool extends EventEmitter { this.spents = new BufferMap(); this.rejects = new RollingFilter(120000, 0.000001); - this.coinIndex = new CoinIndex(); - this.txIndex = new TXIndex(); + this.addrindex = new AddrIndex(); } /** @@ -364,8 +362,7 @@ class Mempool extends EventEmitter { this.orphans.clear(); this.map.clear(); this.spents.clear(); - this.coinIndex.reset(); - this.txIndex.reset(); + this.addrindex.reset(); this.freeCount = 0; this.lastTime = 0; @@ -568,73 +565,32 @@ class Mempool extends EventEmitter { return entry.tx; } - /** - * Find all coins pertaining to a certain address. - * @param {Address[]} addrs - * @returns {Coin[]} - */ - - getCoinsByAddress(addrs) { - if (!Array.isArray(addrs)) - addrs = [addrs]; - - const out = []; - - for (const addr of addrs) { - const hash = Address.getHash(addr); - const coins = this.coinIndex.get(hash); - - for (const coin of coins) - out.push(coin); - } - - return out; - } - /** * Find all transactions pertaining to a certain address. 
- * @param {Address[]} addrs + * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after * @returns {TX[]} */ - getTXByAddress(addrs) { - if (!Array.isArray(addrs)) - addrs = [addrs]; - - const out = []; - - for (const addr of addrs) { - const hash = Address.getHash(addr); - const txs = this.txIndex.get(hash); - - for (const tx of txs) - out.push(tx); - } - - return out; + getTXByAddress(addr, options) { + return this.addrindex.get(addr, options); } /** * Find all transactions pertaining to a certain address. - * @param {Address[]} addrs + * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after * @returns {TXMeta[]} */ - getMetaByAddress(addrs) { - if (!Array.isArray(addrs)) - addrs = [addrs]; - - const out = []; - - for (const addr of addrs) { - const hash = Address.getHash(addr); - const txs = this.txIndex.getMeta(hash); - - for (const tx of txs) - out.push(tx); - } - - return out; + getMetaByAddress(addr, options) { + return this.addrindex.getMeta(addr, options); } /** @@ -1883,17 +1839,7 @@ class Mempool extends EventEmitter { */ indexEntry(entry, view) { - const tx = entry.tx; - - this.txIndex.insert(entry, view); - - for (const {prevout} of tx.inputs) { - const {hash, index} = prevout; - this.coinIndex.remove(hash, index); - } - - for (let i = 0; i < tx.outputs.length; i++) - this.coinIndex.insert(tx, i); + this.addrindex.insert(entry, view); } /** @@ -1903,23 +1849,8 @@ class Mempool extends EventEmitter { */ unindexEntry(entry) { - const tx = entry.tx; - const hash = tx.hash(); - - this.txIndex.remove(hash); - - for (const {prevout} of tx.inputs) { - const {hash, index} = prevout; - const prev = this.getTX(hash); - - if (!prev) - continue; - - this.coinIndex.insert(prev, index); - } - - for (let i = 0; i < tx.outputs.length; i++) - this.coinIndex.remove(hash, i); 
+ const hash = entry.tx.hash(); + this.addrindex.remove(hash); } /** @@ -2188,11 +2119,11 @@ class MempoolOptions { } /** - * TX Address Index + * Address Index * @ignore */ -class TXIndex { +class AddrIndex { /** * Create TX address index. * @constructor @@ -2211,29 +2142,53 @@ class TXIndex { this.map.clear(); } - get(addr) { - const items = this.index.get(addr); + getKey(addr) { + const prefix = addr.getPrefix(); - if (!items) - return []; + if (prefix < 0) + return null; + + const raw = Buffer.allocUnsafe(1); + raw.writeUInt8(prefix); + + return Buffer.concat([raw, addr.getHash()]); + } + + /** + * Get transactions by address. + * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after + */ + + get(addr, options = {}) { + const values = this.getEntries(addr, options); const out = []; - for (const entry of items.values()) + for (const entry of values) out.push(entry.tx); return out; } - getMeta(addr) { - const items = this.index.get(addr); + /** + * Get transaction meta by address. + * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after + */ - if (!items) - return []; + getMeta(addr, options = {}) { + const values = this.getEntries(addr, options); const out = []; - for (const entry of items.values()) { + for (const entry of values) { const meta = TXMeta.fromTX(entry.tx); meta.mtime = entry.time; out.push(meta); @@ -2242,152 +2197,134 @@ class TXIndex { return out; } - insert(entry, view) { - const tx = entry.tx; - const hash = tx.hash(); - const addrs = tx.getHashes(view); - - if (addrs.length === 0) - return; - - for (const addr of addrs) { - let items = this.index.get(addr); + /** + * Get entries by address. 
+ * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after + */ - if (!items) { - items = new BufferMap(); - this.index.set(addr, items); - } + getEntries(addr, options = {}) { + const {limit, reverse, after} = options; + const key = this.getKey(addr); - assert(!items.has(hash)); - items.set(hash, entry); - } + if (!key) + return []; - this.map.set(hash, addrs); - } + const items = this.index.get(key); - remove(hash) { - const addrs = this.map.get(hash); + if (!items) + return []; - if (!addrs) - return; + let values = []; - for (const addr of addrs) { - const items = this.index.get(addr); + const skip = (after && !items.has(after) && reverse); - assert(items); - assert(items.has(hash)); + if (skip) + return values; - items.delete(hash); + if (after && items.has(after)) { + let index = 0; - if (items.size === 0) - this.index.delete(addr); - } + for (const k of items.keys()) { + if (k.compare(after) === 0) + break; + index += 1; + } - this.map.delete(hash); - } -} + values = Array.from(items.values()); -/** - * Coin Address Index - * @ignore - */ + let start = index + 1; + let end = values.length; -class CoinIndex { - /** - * Create coin address index. - * @constructor - */ + if (end - start > limit) + end = start + limit; - constructor() { - // Map of addr->coins. - this.index = new BufferMap(); + if (reverse) { + start = 0; + end = index; - // Map of outpoint->addr. 
- this.map = new BufferMap(); - } + if (end - start > limit) + start = end - limit; + } - reset() { - this.index.clear(); - this.map.clear(); - } + values = values.slice(start, end); + } else { + values = Array.from(items.values()); - get(addr) { - const items = this.index.get(addr); + if (values.length > limit) { + let start = 0; + let end = limit; - if (!items) - return []; + if (reverse) { + start = values.length - limit; + end = values.length; + } - const out = []; + values = values.slice(start, end); + } + } - for (const coin of items.values()) - out.push(coin.toCoin()); + if (reverse) + values.reverse(); - return out; + return values; } - insert(tx, index) { - const output = tx.outputs[index]; + insert(entry, view) { + const tx = entry.tx; const hash = tx.hash(); - const addr = output.getHash(); + const addrs = tx.getAddresses(view); - if (!addr) + if (addrs.length === 0) return; - let items = this.index.get(addr); + for (const addr of addrs) { + const key = this.getKey(addr); + + if (!key) + continue; - if (!items) { - items = new BufferMap(); - this.index.set(addr, items); - } + let items = this.index.get(key); - const key = Outpoint.toKey(hash, index); + if (!items) { + items = new BufferMap(); + this.index.set(key, items); + } - assert(!items.has(key)); - items.set(key, new IndexedCoin(tx, index)); + assert(!items.has(hash)); + items.set(hash, entry); + } - this.map.set(key, addr); + this.map.set(hash, addrs); } - remove(hash, index) { - const key = Outpoint.toKey(hash, index); - const addr = this.map.get(key); + remove(hash) { + const addrs = this.map.get(hash); - if (!addr) + if (!addrs) return; - const items = this.index.get(addr); - - assert(items); - assert(items.has(key)); - items.delete(key); + for (const addr of addrs) { + const key = this.getKey(addr); - if (items.size === 0) - this.index.delete(addr); + if (!key) + continue; - this.map.delete(key); - } -} + const items = this.index.get(key); -/** - * Indexed Coin - * @ignore - */ + 
assert(items); + assert(items.has(hash)); -class IndexedCoin { - /** - * Create an indexed coin. - * @constructor - * @param {TX} tx - * @param {Number} index - */ + items.delete(hash); - constructor(tx, index) { - this.tx = tx; - this.index = index; - } + if (items.size === 0) + this.index.delete(key); + } - toCoin() { - return Coin.fromTX(this.tx, this.index, -1); + this.map.delete(hash); } } diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 72b96b969..0a3612fc0 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -471,21 +471,49 @@ class FullNode extends Node { */ async getMetaByAddress(addr, options = {}) { - const mempool = this.mempool.getMetaByAddress(addr); + if (!this.txindex || !this.addrindex) + return []; - if (this.txindex && this.addrindex) { - const hashes = await this.addrindex.getHashesByAddress(addr, options); - const mtxs = []; + const {reverse, after} = options; + let {limit} = options; + + let metas = []; + + const confirmed = async () => { + const hashes = await this.addrindex.getHashesByAddress( + addr, {limit, reverse, after}); for (const hash of hashes) { const mtx = await this.txindex.getMeta(hash); assert(mtx); - mtxs.push(mtx); + metas.push(mtx); } - return mtxs.concat(mempool); - } + }; + + const unconfirmed = () => { + const mempool = this.mempool.getMetaByAddress( + addr, {limit, reverse, after}); + + metas = metas.concat(mempool); + }; + + if (reverse) + unconfirmed(); + else + await confirmed(); + + if (metas.length > 0) + limit -= metas.length; + + if (limit <= 0) + return metas; + + if (reverse) + await confirmed(); + else + unconfirmed(); - return mempool; + return metas; } /** diff --git a/test/indexer-test.js b/test/indexer-test.js index cd45499c9..562859449 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -220,7 +220,8 @@ describe('Indexer', function() { } ]; - const txids = []; + const confirmed = []; + const unconfirmed = []; const ports = { p2p: 49331, @@ -287,7 +288,7 @@ 
describe('Indexer', function() { const txid = await wclient.execute( 'sendtoaddress', [v.addr, v.amount]); - txids.push(txid); + confirmed.push(txid); } const blocks = await nclient.execute( @@ -295,6 +296,16 @@ describe('Indexer', function() { assert.equal(blocks.length, 1); } + + // Send unconfirmed to the vector addresses. + for (let i = 0; i < 3; i++) { + for (const v of vectors) { + const txid = await wclient.execute( + 'sendtoaddress', [v.addr, v.amount]); + + unconfirmed.push(txid); + } + } }); after(async () => { @@ -304,36 +315,69 @@ describe('Indexer', function() { }); for (const v of vectors) { - it(`will get txs by ${v.label} address`, async () => { + it(`txs by ${v.label} address`, async () => { const res = await nclient.request( 'GET', `/tx/address/${v.addr}`, {}); - assert.equal(res.length, 10); + assert.equal(res.length, 13); - for (const tx of res) - assert(txids.includes(tx.hash)); + for (let i = 0; i < 10; i++) + assert(confirmed.includes(res[i].hash)); + + for (let i = 10; i < 13; i++) + assert(unconfirmed.includes(res[i].hash)); }); - it(`will get txs by ${v.label} address (limit)`, async () => { + it(`txs by ${v.label} address (limit)`, async () => { const res = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 3}); + assert.equal(res.length, 3); + for (const tx of res) - assert(txids.includes(tx.hash)); + assert(confirmed.includes(tx.hash)); + }); + + it(`txs by ${v.label} address (limit w/ unconf)`, async () => { + const res = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 11}); + + assert.equal(res.length, 11); + + for (let i = 0; i < 10; i++) + assert(confirmed.includes(res[i].hash)); + + for (let i = 10; i < 11; i++) + assert(unconfirmed.includes(res[i].hash)); }); it(`txs by ${v.label} address (reverse)`, async () => { const asc = await nclient.request( 'GET', `/tx/address/${v.addr}`, {reverse: false}); + assert.equal(asc.length, 13); + const dsc = await nclient.request( 'GET', `/tx/address/${v.addr}`, 
{reverse: true}); - for (let i = 0; i < dsc.length; i++) - assert.equal(asc[i].hash, dsc[dsc.length - i - 1].hash); + assert.equal(asc.length, 13); + + for (let i = 0; i < 10; i++) + assert(confirmed.includes(asc[i].hash)); + + for (let i = 10; i < 13; i++) + assert(unconfirmed.includes(asc[i].hash)); + + // Check the the results are reverse + // of each other. + for (let i = 0; i < dsc.length; i++) { + const atx = asc[i]; + const dtx = dsc[dsc.length - i - 1]; + assert.equal(atx.hash, dtx.hash); + } }); - it(`txs by ${v.label} address after txid`, async () => { + it(`txs by ${v.label} address (after)`, async () => { const one = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 3}); assert.strictEqual(one.length, 3); @@ -351,26 +395,105 @@ describe('Indexer', function() { assert.deepEqual(one.concat(two), all); }); - it(`txs by ${v.label} address after txid (reverse)`, async () => { + it(`txs by ${v.label} address (after w/ unconf)`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 11}); + assert.strictEqual(one.length, 11); + + for (let i = 0; i < 10; i++) + assert(confirmed.includes(one[i].hash)); + + for (let i = 10; i < 11; i++) + assert(unconfirmed.includes(one[i].hash)); + + // The after hash is within the + // unconfirmed transactions. 
+ const hash = one[10].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 1}); + assert.strictEqual(two.length, 1); + assert(unconfirmed.includes(two[0].hash)); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 12}); + assert.strictEqual(all.length, 12); + + assert.deepEqual(one.concat(two), all); + }); + + it(`txs by ${v.label} address (after, reverse)`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 5, reverse: true}); + + assert.strictEqual(one.length, 5); + + for (let i = 0; i < 3; i++) + assert(unconfirmed.includes(one[i].hash)); + + for (let i = 3; i < 5; i++) + assert(confirmed.includes(one[i].hash)); + + // The after hash is within the + // confirmed transactions. + const hash = one[4].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {after: hash, limit: 3, reverse: true}); + + assert.strictEqual(two.length, 3); + for (let i = 0; i < 3; i++) + assert(confirmed.includes(two[i].hash)); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 8, reverse: true}); + + assert.strictEqual(all.length, 8); + + for (let i = 0; i < 3; i++) + assert(unconfirmed.includes(all[i].hash)); + + for (let i = 3; i < 8; i++) + assert(confirmed.includes(all[i].hash)); + + assert.deepEqual(one.concat(two), all); + }); + + it(`txs by ${v.label} address (after, reverse w/ unconf)`, async () => { const one = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 3, reverse: true}); assert.strictEqual(one.length, 3); + for (let i = 0; i < 3; i++) + assert(unconfirmed.includes(one[i].hash)); + // The after hash is within the + // unconfirmed transactions. 
const hash = one[2].hash; const two = await nclient.request( 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 3, reverse: true}); - assert.strictEqual(one.length, 3); + assert.strictEqual(two.length, 3); + for (let i = 0; i < 3; i++) + assert(confirmed.includes(two[i].hash)); const all = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 6, reverse: true}); - assert.strictEqual(one.length, 3); + assert.strictEqual(all.length, 6); + + for (let i = 0; i < 3; i++) + assert(unconfirmed.includes(all[i].hash)); + + for (let i = 3; i < 6; i++) + assert(confirmed.includes(all[i].hash)); assert.deepEqual(one.concat(two), all); }); From bb797602e68f2e6fff11a0a5a7a7a3a808632f7c Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 16 Apr 2019 14:54:51 -0700 Subject: [PATCH 20/40] node: add http indexer info --- lib/node/http.js | 10 ++++++++++ test/http-test.js | 7 +++++++ 2 files changed, 17 insertions(+) diff --git a/lib/node/http.js b/lib/node/http.js index 2edd8cb2c..01de675fb 100644 --- a/lib/node/http.js +++ b/lib/node/http.js @@ -127,6 +127,16 @@ class HTTP extends Server { tip: this.chain.tip.rhash(), progress: this.chain.getProgress() }, + indexes: { + addr: { + enabled: Boolean(this.node.addrindex), + height: this.node.addrindex ? this.node.addrindex.height : 0 + }, + tx: { + enabled: Boolean(this.node.txindex), + height: this.node.txindex ? 
this.node.txindex.height : 0 + } + }, pool: { host: addr.host, port: addr.port, diff --git a/test/http-test.js b/test/http-test.js index 93af58484..6c52fffe9 100644 --- a/test/http-test.js +++ b/test/http-test.js @@ -89,6 +89,13 @@ describe('HTTP', function() { assert.strictEqual(info.pool.agent, node.pool.options.agent); assert.typeOf(info.chain, 'object'); assert.strictEqual(info.chain.height, 0); + assert.typeOf(info.indexes, 'object'); + assert.typeOf(info.indexes.addr, 'object'); + assert.equal(info.indexes.addr.enabled, false); + assert.equal(info.indexes.addr.height, 0); + assert.typeOf(info.indexes.tx, 'object'); + assert.equal(info.indexes.addr.enabled, false); + assert.equal(info.indexes.tx.height, 0); }); it('should get wallet info', async () => { From b37ac599735be5a492a4ec1d3ef9e8522ecd6788 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 16 Apr 2019 15:38:49 -0700 Subject: [PATCH 21/40] test: add utility to wait for values --- test/indexer-test.js | 5 +++++ test/util/common.js | 21 +++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/test/indexer-test.js b/test/indexer-test.js index 562859449..8f8bb2f0b 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -16,6 +16,7 @@ const FullNode = require('../lib/node/fullnode'); const Network = require('../lib/protocol/network'); const network = Network.get('regtest'); const {NodeClient, WalletClient} = require('bclient'); +const {forValue} = require('./util/common'); const workers = new WorkerPool({ enabled: true @@ -297,6 +298,8 @@ describe('Indexer', function() { assert.equal(blocks.length, 1); } + await forValue(node.chain, 'height', 160); + // Send unconfirmed to the vector addresses. 
for (let i = 0; i < 3; i++) { for (const v of vectors) { @@ -306,6 +309,8 @@ describe('Indexer', function() { unconfirmed.push(txid); } } + + await forValue(node.mempool.map, 'size', 6); }); after(async () => { diff --git a/test/util/common.js b/test/util/common.js index 39177fc52..3fd9f501a 100644 --- a/test/util/common.js +++ b/test/util/common.js @@ -102,6 +102,27 @@ common.rimraf = async function(p) { return await fs.rimraf(p); }; +common.forValue = async function(obj, key, val, timeout = 60000) { + assert(typeof obj === 'object'); + assert(typeof key === 'string'); + + const ms = 10; + let interval = null; + let count = 0; + return new Promise((resolve, reject) => { + interval = setInterval(() => { + if (obj[key] === val) { + clearInterval(interval); + resolve(); + } else if (count * ms >= timeout) { + clearInterval(interval); + reject(new Error('Timeout waiting for value.')); + } + count += 1; + }, ms); + }); +}; + function parseUndo(data) { const br = bio.read(data); const items = []; From 80aaa148e4f73cbe91b49a33b340082c1564a9f7 Mon Sep 17 00:00:00 2001 From: Nodar Chkuaselidze Date: Wed, 17 Apr 2019 10:57:58 -0700 Subject: [PATCH 22/40] test: add mempool index persistence test --- test/mempool-test.js | 263 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 263 insertions(+) diff --git a/test/mempool-test.js b/test/mempool-test.js index 247720f6e..4b726ba25 100644 --- a/test/mempool-test.js +++ b/test/mempool-test.js @@ -5,6 +5,8 @@ const assert = require('./util/assert'); const random = require('bcrypto/lib/random'); +const common = require('../lib/blockchain/common'); +const Block = require('../lib/primitives/block'); const MempoolEntry = require('../lib/mempool/mempoolentry'); const Mempool = require('../lib/mempool/mempool'); const WorkerPool = require('../lib/workers/workerpool'); @@ -14,12 +16,16 @@ const Coin = require('../lib/primitives/coin'); const KeyRing = require('../lib/primitives/keyring'); const Address = 
require('../lib/primitives/address'); const Outpoint = require('../lib/primitives/outpoint'); +const Input = require('../lib/primitives/input'); const Script = require('../lib/script/script'); const opcodes = Script.opcodes; const Witness = require('../lib/script/witness'); const MemWallet = require('./util/memwallet'); const BlockStore = require('../lib/blockstore/level'); +const {BufferSet} = require('buffer-map'); + const ALL = Script.hashType.ALL; +const VERIFY_NONE = common.flags.VERIFY_NONE; const ONE_HASH = Buffer.alloc(32, 0x00); ONE_HASH[0] = 0x01; @@ -69,6 +75,28 @@ function dummyInput(script, hash) { return Coin.fromTX(fund, 0, -1); } +async function getMockBlock(chain, txs = [], cb = true) { + if (cb) { + const raddr = KeyRing.generate().getAddress(); + const mtx = new MTX(); + mtx.addInput(new Input()); + mtx.addOutput(raddr, 0); + + txs = [mtx.toTX(), ...txs]; + } + + const now = Math.floor(Date.now() / 1000); + const time = chain.tip.time <= now ? chain.tip.time + 1 : now; + + const block = new Block(); + block.txs = txs; + block.prevBlock = chain.tip.hash; + block.time = time; + block.bits = await chain.getTarget(block.time, chain.tip); + + return block; +} + describe('Mempool', function() { this.timeout(5000); @@ -463,4 +491,239 @@ describe('Mempool', function() { await blocks.close(); await workers.close(); }); + + describe('Mempool persistent cache', function () { + const workers = new WorkerPool({ + enabled: true + }); + + const blocks = new BlockStore({ + memory: true + }); + + const chain = new Chain({ + memory: true, + workers, + blocks + }); + + const mempool = new Mempool({ + chain, + workers, + memory: true, + indexAddress: true, + persistent: true + }); + + before(async () => { + await blocks.open(); + await mempool.open(); + await chain.open(); + await workers.open(); + }); + + after(async () => { + await workers.close(); + await chain.close(); + await mempool.close(); + await blocks.close(); + }); + + // Number of coins available in + 
// chaincoins (100k satoshi per coin). + const N = 100; + const chaincoins = new MemWallet(); + const wallet = new MemWallet(); + + it('should create txs in chain', async () => { + const mtx = new MTX(); + mtx.addInput(new Input()); + + for (let i = 0; i < N; i++) { + const addr = chaincoins.createReceive().getAddress(); + mtx.addOutput(addr, 100000); + } + + const cb = mtx.toTX(); + const block = await getMockBlock(chain, [cb], false); + const entry = await chain.add(block, VERIFY_NONE); + + await mempool._addBlock(entry, block.txs); + + // Add 100 blocks so we don't get premature + // spend of coinbase. + for (let i = 0; i < 100; i++) { + const block = await getMockBlock(chain); + const entry = await chain.add(block, VERIFY_NONE); + + await mempool._addBlock(entry, block.txs); + } + + chaincoins.addTX(cb); + }); + + it('should restore txs in the mempool', async () => { + const coins = chaincoins.getCoins(); + + assert.strictEqual(coins.length, N); + + const addrs = []; + const txs = 20; + const spend = 5; + + for (let i = 0; i < txs; i++) + addrs.push(wallet.createReceive().getAddress()); + + const mempoolTXs = new BufferSet(); + const mempoolCoins = new BufferSet(); + + // Send 15 txs to the wallet. + for (let i = 0; i < txs - spend; i++) { + const mtx = new MTX(); + + mtx.addCoin(coins[i]); + mtx.addOutput(addrs[i], 90000); + + chaincoins.sign(mtx); + + const tx = mtx.toTX(); + const missing = await mempool.addTX(tx); + + assert.strictEqual(missing, null); + assert(mempool.hasCoin(tx.hash(), 0)); + + // Indexer checks. + { + const txs = mempool.getTXByAddress(addrs[i]); + + assert.strictEqual(txs.length, 1); + assert.bufferEqual(txs[0].hash(), tx.hash()); + } + + wallet.addTX(tx); + + mempoolTXs.add(tx.hash()); + mempoolCoins.add(Outpoint.fromTX(tx, 0).toKey()); + } + + // Spend first 5 coins from the mempool. 
+ for (let i = 0; i < spend; i++) { + const coin = wallet.getCoins()[0]; + const addr = addrs[txs - spend + i]; + const mtx = new MTX(); + + mtx.addCoin(coin); + mtx.addOutput(addr, 80000); + + wallet.sign(mtx); + + const tx = mtx.toTX(); + const missing = await mempool.addTX(tx); + + assert.strictEqual(missing, null); + assert(!mempool.hasCoin(coin.hash, 0)); + assert(mempool.hasCoin(tx.hash(), 0)); + + { + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 1); + } + + { + const txs = mempool.getTXByAddress(addrs[i]); + assert.strictEqual(txs.length, 2); + } + + mempoolTXs.add(tx.hash()); + mempoolCoins.delete(coin.toKey()); + mempoolCoins.add(Outpoint.fromTX(tx, 0).toKey()); + + wallet.addTX(tx); + } + + const verifyMempoolState = (mempool) => { + // Verify general state of the mempool. + assert.strictEqual(mempool.map.size, txs); + assert.strictEqual(mempool.spents.size, txs); + + assert.strictEqual(mempool.addrindex.map.size, txs); + + // Verify txs are same. + for (const val of mempoolTXs.values()) + assert(mempool.getTX(val)); + + for (const opkey of mempoolCoins.values()) { + const outpoint = Outpoint.fromRaw(opkey); + assert(mempool.hasCoin(outpoint.hash, outpoint.index)); + } + + // Coins in these txs are spent. + for (let i = 0; i < spend; i++) { + const addr = addrs[i]; + + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 2); + } + + // These txs are untouched. + for (let i = spend; i < txs - spend; i++) { + const addr = addrs[i]; + + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 1); + } + + // These are txs spending mempool txs. + for (let i = txs - spend; i < txs; i++) { + const addr = addrs[i]; + + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 1); + } + }; + + verifyMempoolState(mempool); + + // Hack to get in memory cache in new mempool. 
+ const cache = mempool.cache; + + // We need to manually sync because when first block + // was mined there were no mempool txs. + await cache.sync(chain.tip.hash); + + // Apply batch to the memdb. + await cache.flush(); + await mempool.close(); + + let err; + { + const mempool = new Mempool({ + chain, + workers, + memory: true, + indexAddress: true, + persistent: true + }); + + mempool.cache = cache; + + await mempool.open(); + + try { + verifyMempoolState(mempool); + } catch (e) { + err = e; + } finally { + await cache.wipe(); + await mempool.close(); + } + } + + // Reopen for after cleanup. + await mempool.open(); + + if (err) + throw err; + }); + }); }); From f30276bc278535f910de0a3408f495e7f2b848b0 Mon Sep 17 00:00:00 2001 From: Nodar Chkuaselidze Date: Wed, 17 Apr 2019 10:58:08 -0700 Subject: [PATCH 23/40] test: add mempool indexing tests --- test/mempool-test.js | 288 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 288 insertions(+) diff --git a/test/mempool-test.js b/test/mempool-test.js index 4b726ba25..0f02c0237 100644 --- a/test/mempool-test.js +++ b/test/mempool-test.js @@ -492,6 +492,294 @@ describe('Mempool', function() { await workers.close(); }); + describe('Index', function () { + const workers = new WorkerPool({ + enabled: true + }); + + const blocks = new BlockStore({ + memory: true + }); + + const chain = new Chain({ + memory: true, + workers, + blocks + }); + + const mempool = new Mempool({ + chain, + workers, + memory: true, + indexAddress: true + }); + + before(async () => { + await blocks.open(); + await mempool.open(); + await chain.open(); + await workers.open(); + }); + + after(async () => { + await workers.close(); + await chain.close(); + await mempool.close(); + await blocks.close(); + }); + + // Number of coins available in + // chaincoins (100k satoshi per coin). 
+ const N = 100; + const chaincoins = new MemWallet(); + const wallet = new MemWallet(); + + it('should create coins in chain', async () => { + const mtx = new MTX(); + mtx.addInput(new Input()); + + for (let i = 0; i < N; i++) { + const addr = chaincoins.createReceive().getAddress(); + mtx.addOutput(addr, 100000); + } + + const cb = mtx.toTX(); + const block = await getMockBlock(chain, [cb], false); + const entry = await chain.add(block, VERIFY_NONE); + + await mempool._addBlock(entry, block.txs); + + // Add 100 blocks so we don't get + // premature spend of coinbase. + for (let i = 0; i < 100; i++) { + const block = await getMockBlock(chain); + const entry = await chain.add(block, VERIFY_NONE); + + await mempool._addBlock(entry, block.txs); + } + + chaincoins.addTX(cb); + }); + + it('should spend txs and coins in the mempool', async () => { + // Verify coins are removed from the coin index. + const coin = chaincoins.getCoins()[0]; + const addr = wallet.createReceive().getAddress(); + + const mtx1 = new MTX(); + + mtx1.addCoin(coin); + mtx1.addOutput(addr, 90000); + + chaincoins.sign(mtx1); + + const tx1 = mtx1.toTX(); + + chaincoins.addTX(tx1, -1); + wallet.addTX(tx1, -1); + + { + const missing = await mempool.addTX(tx1); + assert.strictEqual(missing, null); + } + + assert(mempool.hasCoin(tx1.hash(), 0)); + + { + const txs = mempool.getTXByAddress(addr); + const metas = mempool.getMetaByAddress(addr); + + assert.strictEqual(txs.length, 1); + assert.strictEqual(metas.length, 1); + + assert.bufferEqual(txs[0].hash(), tx1.hash()); + } + + const mtx2 = new MTX(); + + mtx2.addTX(tx1, 0, -1); + mtx2.addOutput(addr, 80000); + + wallet.sign(mtx2); + + const tx2 = mtx2.toTX(); + + { + const missing = await mempool.addTX(tx2); + assert.strictEqual(missing, null); + } + + wallet.addTX(tx2, -1); + + assert(!mempool.hasCoin(tx1.hash(), 0)); + assert(mempool.hasCoin(tx2.hash(), 0)); + + { + const txs = mempool.getTXByAddress(addr); + + assert.strictEqual(txs.length, 2); + } + 
}); + + it('should spend resolved orphans', async () => { + const coin = chaincoins.getCoins()[0]; + const addr = wallet.createReceive().getAddress(); + + const pmtx = new MTX(); + + pmtx.addOutput(addr, 90000); + pmtx.addCoin(coin); + + chaincoins.sign(pmtx); + + const parentTX = pmtx.toTX(); + + const cmtx = new MTX(); + + cmtx.addTX(pmtx.toTX(), 0, -1); + cmtx.addOutput(addr, 80000); + + wallet.sign(cmtx); + + const childTX = cmtx.toTX(); + + { + // Create orphan tx. + const missing = await mempool.addTX(childTX); + + // We only have one input missing. + assert.strictEqual(missing.length, 1); + } + + { + const txs = mempool.getTXByAddress(addr); + + assert.strictEqual(txs.length, 0); + } + + { + // Orphans are not coins. + const childCoin = mempool.getCoin(childTX.hash(), 0); + assert.strictEqual(childCoin, null); + } + + { + // Orphans should be resolved. + const missing = await mempool.addTX(parentTX); + assert.strictEqual(missing, null); + + // Coins should be available once they are resolved. + const parentCoin = mempool.getCoin(parentTX.hash(), 0); + + // We spent this. + assert.strictEqual(parentCoin, null); + + const childCoin = mempool.getCoin(childTX.hash(), 0); + assert(childCoin); + } + + { + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 2); + } + + // Update coins in wallets. + for (const tx of [parentTX, childTX]) { + chaincoins.addTX(tx); + wallet.addTX(tx); + } + }); + + it('should remove double spend tx from mempool', async () => { + const coin = chaincoins.getCoins()[0]; + const addr = wallet.createReceive().getAddress(); + const randomAddress = KeyRing.generate().getAddress(); + + // We check double spending our mempool tx. + const mtx1 = new MTX(); + + mtx1.addCoin(coin); + mtx1.addOutput(addr, 90000); + + chaincoins.sign(mtx1); + + // This will double spend in block. 
+ const mtx2 = new MTX(); + + mtx2.addCoin(coin); + mtx2.addOutput(randomAddress, 90000); + + chaincoins.sign(mtx2); + + const tx1 = mtx1.toTX(); + const tx2 = mtx2.toTX(); + + { + const missing = await mempool.addTX(tx1); + assert.strictEqual(missing, null); + } + + { + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 1); + } + + assert(mempool.hasCoin(tx1.hash(), 0)); + + const block = await getMockBlock(chain, [tx2]); + const entry = await chain.add(block, VERIFY_NONE); + + await mempool._addBlock(entry, block.txs); + + { + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 0); + } + + assert(!mempool.hasCoin(tx1.hash(), 0)); + + chaincoins.addTX(tx2); + }); + + it('should remove confirmed txs from mempool', async () => { + const coin = chaincoins.getCoins()[0]; + const addr = wallet.createReceive().getAddress(); + + const mtx = new MTX(); + + mtx.addCoin(coin); + mtx.addOutput(addr, 90000); + + chaincoins.sign(mtx); + + const tx = mtx.toTX(); + + await mempool.addTX(tx); + + assert(mempool.hasCoin(tx.hash(), 0)); + + { + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 1); + } + + const block = await getMockBlock(chain, [tx]); + const entry = await chain.add(block, VERIFY_NONE); + + await mempool._addBlock(entry, block.txs); + + { + const txs = mempool.getTXByAddress(addr); + assert.strictEqual(txs.length, 0); + } + + assert(!mempool.hasCoin(tx.hash(), 0)); + + chaincoins.addTX(tx); + wallet.addTX(tx); + }); + }); + describe('Mempool persistent cache', function () { const workers = new WorkerPool({ enabled: true From fae647b9e97b2e31e2eb667ab927d7a55450644e Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 17 Apr 2019 17:18:07 -0700 Subject: [PATCH 24/40] indexer: simplify block meta and remove records --- lib/indexer/indexer.js | 29 +++++--- lib/indexer/layout.js | 9 +-- lib/indexer/records.js | 152 ----------------------------------------- 3 files changed, 24 insertions(+), 166 
deletions(-) delete mode 100644 lib/indexer/records.js diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 702d5a4ad..7c092cf24 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -15,7 +15,7 @@ const Logger = require('blgr'); const Network = require('../protocol/network'); const util = require('../utils/util'); const layout = require('./layout'); -const {BlockMeta} = require('./records'); +const {ZERO_HASH} = require('../protocol/consensus'); /** * Indexer @@ -221,11 +221,7 @@ class Indexer extends EventEmitter { if (!data) return null; - const block = new BlockMeta(); - block.hash = data; - block.height = height; - - return block; + return new BlockMeta(data, height); } /** @@ -399,7 +395,7 @@ class Indexer extends EventEmitter { if (entry.height !== this.height + 1) throw new Error('Indexer: Can not add block.'); - const tip = BlockMeta.fromEntry(entry); + const tip = new BlockMeta(entry.hash, entry.height); // Start the batch write. this.start(); @@ -458,7 +454,7 @@ class Indexer extends EventEmitter { if (entry.height !== this.height) throw new Error('Indexer: Can not remove block.'); - const tip = BlockMeta.fromEntry(entry); + const tip = new BlockMeta(entry.hash, entry.height); // Start the batch write. this.start(); @@ -498,7 +494,7 @@ class Indexer extends EventEmitter { } // Add to batch write to save tip and height. 
- this.put(layout.h.encode(tip.height), tip.toHash()); + this.put(layout.h.encode(tip.height), tip.hash); const raw = bio.write(4).writeU32(tip.height).render(); this.put(layout.R.encode(), raw); @@ -544,9 +540,22 @@ class Indexer extends EventEmitter { } } +/** + * Block Meta + */ + +class BlockMeta { + constructor(hash, height) { + this.hash = hash || ZERO_HASH; + this.height = height || 0; + + assert(Buffer.isBuffer(this.hash) && this.hash.length === 32); + assert(Number.isInteger(this.height)); + } +} + /** * Index Options - * @alias module:indexer.IndexOptions */ class IndexOptions { diff --git a/lib/indexer/layout.js b/lib/indexer/layout.js index e2bc243c3..3edbd9ede 100644 --- a/lib/indexer/layout.js +++ b/lib/indexer/layout.js @@ -9,12 +9,13 @@ const bdb = require('bdb'); /* - * Index Database Layout: - * To be extended by indexer implementations + * Index database layout: + * To be extended by indexer implementations. + * * V -> db version * O -> flags - * h[height] -> recent block hash - * R -> chain sync state + * h[height] -> block hash + * R -> index sync height */ const layout = { diff --git a/lib/indexer/records.js b/lib/indexer/records.js deleted file mode 100644 index 7db64b943..000000000 --- a/lib/indexer/records.js +++ /dev/null @@ -1,152 +0,0 @@ -/*! - * records.js - indexer records - * Copyright (c) 2018, the bcoin developers (MIT License). - * https://github.com/bcoin-org/bcoin - */ - -'use strict'; - -/** - * @module lib/records - */ - -const bio = require('bufio'); -const util = require('../utils/util'); -const consensus = require('../protocol/consensus'); - -/** - * Block Meta - * @alias module:indexer.BlockMeta - */ - -class BlockMeta { - /** - * Create block meta. - * @constructor - * @param {Hash} hash - * @param {Number} height - */ - - constructor(hash, height) { - this.hash = hash || consensus.NULL_HASH; - this.height = height != null ? height : -1; - } - - /** - * Clone the block. 
- * @returns {BlockMeta} - */ - - clone() { - return new this.constructor(this.hash, this.height); - } - - /** - * Get block meta hash as a buffer. - * @returns {Buffer} - */ - - toHash() { - return Buffer.from(this.hash, 'hex'); - } - - /** - * Instantiate block meta from chain entry. - * @private - * @param {IndexEntry} entry - */ - - fromEntry(entry) { - this.hash = entry.hash; - this.height = entry.height; - return this; - } - - /** - * Instantiate block meta from json object. - * @private - * @param {Object} json - */ - - fromJSON(json) { - this.hash = util.revHex(json.hash); - this.height = json.height; - return this; - } - - /** - * Instantiate block meta from serialized tip data. - * @private - * @param {Buffer} data - */ - - fromRaw(data) { - const br = bio.read(data); - this.hash = br.readHash('hex'); - this.height = br.readI32(); - return this; - } - - /** - * Instantiate block meta from chain entry. - * @param {IndexEntry} entry - * @returns {BlockMeta} - */ - - static fromEntry(entry) { - return new this().fromEntry(entry); - } - - /** - * Instantiate block meta from json object. - * @param {Object} json - * @returns {BlockMeta} - */ - - static fromJSON(json) { - return new this().fromJSON(json); - } - - /** - * Instantiate block meta from serialized data. - * @param {Hash} hash - * @param {Buffer} data - * @returns {BlockMeta} - */ - - static fromRaw(data) { - return new this().fromRaw(data); - } - - /** - * Serialize the block meta. - * @returns {Buffer} - */ - - toRaw() { - const bw = bio.write(36); - bw.writeHash(this.hash); - bw.writeI32(this.height); - return bw.render(); - } - - /** - * Convert the block meta to a more json-friendly object. 
- * @returns {Object} - */ - - toJSON() { - return { - hash: util.revHex(this.hash), - height: this.height - }; - } -} - -/* - * Expose - */ - -exports.BlockMeta = BlockMeta; - -module.exports = exports; From 69e9844f82c936a33a793a5f390ffd5f01bacd04 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 17 Apr 2019 17:50:52 -0700 Subject: [PATCH 25/40] test: cleanup and add txindex tests --- lib/indexer/txindexer.js | 3 --- test/indexer-test.js | 37 +++++++++++++++++++++++++++++++------ 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index 15e73f534..4c1d5eb09 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -124,9 +124,6 @@ class TxRecord { this.height = br.readU32(); this.index = br.readU32(); - if (this.index === 0x7fffffff) - this.index = -1; - this.offset = br.readU32(); this.length = br.readU32(); diff --git a/test/indexer-test.js b/test/indexer-test.js index 8f8bb2f0b..e26003027 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -80,10 +80,14 @@ describe('Indexer', function() { }); describe('index 10 blocks', function() { + let addr = null; + before(async () => { miner.addresses.length = 0; miner.addAddress(wallet.getReceive()); + addr = miner.getAddress(); + for (let i = 0; i < 10; i++) { const block = await cpu.mineBlock(); assert(block); @@ -101,13 +105,11 @@ describe('Indexer', function() { }); it('should get txs by address (limit)', async () => { - const addr = miner.getAddress(); const hashes = await addrindexer.getHashesByAddress(addr, {limit: 1}); assert.strictEqual(hashes.length, 1); }); it('should get txs by address (reverse)', async () => { - const addr = miner.getAddress(); const hashes = await addrindexer.getHashesByAddress( addr, {reverse: false}); @@ -122,8 +124,7 @@ describe('Indexer', function() { assert.deepEqual(hashes[i], reversed[9 - i]); }); - it('should txs by address after txid', async () => { - const addr = miner.getAddress(); + 
it('should get txs by address after txid', async () => { const hashes = await addrindexer.getHashesByAddress(addr, {limit: 5}); assert.strictEqual(hashes.length, 5); @@ -141,8 +142,7 @@ describe('Indexer', function() { assert.deepEqual(hashes.concat(next), all); }); - it('should txs by address after txid (reverse)', async () => { - const addr = miner.getAddress(); + it('should get txs by address after txid (reverse)', async () => { const hashes = await addrindexer.getHashesByAddress( addr, {limit: 5, reverse: true}); @@ -162,6 +162,31 @@ describe('Indexer', function() { assert.deepEqual(hashes.concat(next), all); }); + + it('should get tx and meta', async () => { + const hashes = await addrindexer.getHashesByAddress(addr, {limit: 1}); + assert.equal(hashes.length, 1); + const hash = hashes[0]; + + const tx = await txindexer.getTX(hash); + const meta = await txindexer.getMeta(hash); + + assert(meta.height); + assert(meta.block); + assert(meta.time); + + assert.deepEqual(meta.tx, tx); + }); + + it('should get null if not found for tx and meta', async () => { + const hash = Buffer.alloc(32); + + const tx = await txindexer.getTX(hash); + const meta = await txindexer.getMeta(hash); + + assert.strictEqual(tx, null); + assert.strictEqual(meta, null); + }); }); describe('rescan and reorg', function() { From b4be8574e55a18ed9e8afe66ec98cc6e8e1474c8 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 18 Apr 2019 10:12:04 -0700 Subject: [PATCH 26/40] test: improve addrindex tests - Vectors for p2wsh and p2sh. - Edge case for witness program. - Improve determinism by not comparing mtime. - Various edge and error cases. 
--- lib/indexer/addrindexer.js | 30 +++- lib/mempool/addrindexer.js | 235 ++++++++++++++++++++++++++ lib/mempool/mempool.js | 213 +----------------------- test/indexer-test.js | 327 +++++++++++++++++++++++++++++++------ test/mempool-test.js | 36 ++++ test/util/assert.js | 7 +- 6 files changed, 576 insertions(+), 272 deletions(-) create mode 100644 lib/mempool/addrindexer.js diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 6f463c070..192581eff 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -138,8 +138,7 @@ class AddrIndexer extends Indexer { const hash = tx.hash(); const count = new Count(height, i); - this.put(layout.C.encode(height, i), hash); - this.put(layout.c.encode(hash), count.toRaw()); + let hasAddress = false; for (const addr of tx.getAddresses(view)) { const prefix = addr.getPrefix(); @@ -150,6 +149,13 @@ class AddrIndexer extends Indexer { const addrHash = addr.getHash(); this.put(layout.A.encode(prefix, addrHash, height, i), null); + + hasAddress = true; + } + + if (hasAddress) { + this.put(layout.C.encode(height, i), hash); + this.put(layout.c.encode(hash), count.toRaw()); } } } @@ -169,8 +175,7 @@ class AddrIndexer extends Indexer { const tx = block.txs[i]; const hash = tx.hash(); - this.del(layout.C.encode(height, i)); - this.del(layout.c.encode(hash)); + let hasAddress = false; for (const addr of tx.getAddresses(view)) { const prefix = addr.getPrefix(); @@ -181,6 +186,13 @@ class AddrIndexer extends Indexer { const addrHash = addr.getHash(); this.del(layout.A.encode(prefix, addrHash, height, i)); + + hasAddress = true; + } + + if (hasAddress) { + this.del(layout.C.encode(height, i)); + this.del(layout.c.encode(hash)); } } } @@ -222,13 +234,19 @@ class AddrIndexer extends Indexer { } }; + // Determine if the hash -> height + index mapping exists. 
const hasAfter = (after && await this.db.has(layout.c.encode(after))); - const skip = (after && !hasAfter && !reverse); + // Check to see if results should be skipped because + // the after hash is expected to be within a following + // mempool query. + const skip = (after && !hasAfter && !reverse); if (skip) return []; if (after && hasAfter) { + // Give results starting from after + // the tx hash for the address. const raw = await this.db.get(layout.c.encode(after)); const count = Count.fromRaw(raw); const {height, index} = count; @@ -241,6 +259,8 @@ class AddrIndexer extends Indexer { opts.lt = layout.A.max(prefix, hash, height, index); } } else { + // Give earliest or latest results + // for the address. opts.gte = layout.A.min(prefix, hash); opts.lte = layout.A.max(prefix, hash); } diff --git a/lib/mempool/addrindexer.js b/lib/mempool/addrindexer.js new file mode 100644 index 000000000..e28353b6e --- /dev/null +++ b/lib/mempool/addrindexer.js @@ -0,0 +1,235 @@ +/*! + * mempool.js - mempool for bcoin + * Copyright (c) 2018-2019, the bcoin developers (MIT License). + * Copyright (c) 2014-2017, Christopher Jeffrey (MIT License). + * https://github.com/bcoin-org/bcoin + */ + +'use strict'; + +const assert = require('bsert'); +const {BufferMap} = require('buffer-map'); +const TXMeta = require('../primitives/txmeta'); + +/** + * Address Indexer + * @ignore + */ + +class AddrIndexer { + /** + * Create TX address index. + * @constructor + */ + + constructor() { + // Map of addr->entries. + this.index = new BufferMap(); + + // Map of txid->addrs. + this.map = new BufferMap(); + } + + reset() { + this.index.clear(); + this.map.clear(); + } + + getKey(addr) { + const prefix = addr.getPrefix(); + + if (prefix < 0) + return null; + + const raw = Buffer.allocUnsafe(1); + raw.writeUInt8(prefix); + + return Buffer.concat([raw, addr.getHash()]); + } + + /** + * Get transactions by address. 
+ * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after + */ + + get(addr, options = {}) { + const values = this.getEntries(addr, options); + + const out = []; + + for (const entry of values) + out.push(entry.tx); + + return out; + } + + /** + * Get transaction meta by address. + * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after + */ + + getMeta(addr, options = {}) { + const values = this.getEntries(addr, options); + + const out = []; + + for (const entry of values) { + const meta = TXMeta.fromTX(entry.tx); + meta.mtime = entry.time; + out.push(meta); + } + + return out; + } + + /** + * Get entries by address. + * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after + */ + + getEntries(addr, options = {}) { + const {limit, reverse, after} = options; + const key = this.getKey(addr); + + if (!key) + return []; + + const items = this.index.get(key); + + if (!items) + return []; + + let values = []; + + // Check to see if results should be skipped because + // the after hash is expected to be within a following + // confirmed query. + const skip = (after && !items.has(after) && reverse); + + if (skip) + return values; + + if (after && items.has(after)) { + // Give results starting from after + // the tx hash for the address. 
+ let index = 0; + + for (const k of items.keys()) { + if (k.compare(after) === 0) + break; + index += 1; + } + + values = Array.from(items.values()); + + let start = index + 1; + let end = values.length; + + if (end - start > limit) + end = start + limit; + + if (reverse) { + start = 0; + end = index; + + if (end > limit) + start = end - limit; + } + + values = values.slice(start, end); + } else { + // Give earliest or latest results + // for the address. + values = Array.from(items.values()); + + if (values.length > limit) { + let start = 0; + let end = limit; + + if (reverse) { + start = values.length - limit; + end = values.length; + } + + values = values.slice(start, end); + } + } + + if (reverse) + values.reverse(); + + return values; + } + + insert(entry, view) { + const tx = entry.tx; + const hash = tx.hash(); + const addrs = tx.getAddresses(view); + + if (addrs.length === 0) + return; + + for (const addr of addrs) { + const key = this.getKey(addr); + + if (!key) + continue; + + let items = this.index.get(key); + + if (!items) { + items = new BufferMap(); + this.index.set(key, items); + } + + assert(!items.has(hash)); + items.set(hash, entry); + } + + this.map.set(hash, addrs); + } + + remove(hash) { + const addrs = this.map.get(hash); + + if (!addrs) + return; + + for (const addr of addrs) { + const key = this.getKey(addr); + + if (!key) + continue; + + const items = this.index.get(key); + + assert(items); + assert(items.has(hash)); + + items.delete(hash); + + if (items.size === 0) + this.index.delete(key); + } + + this.map.delete(hash); + } +} + +/* + * Expose + */ + +module.exports = AddrIndexer; diff --git a/lib/mempool/mempool.js b/lib/mempool/mempool.js index 7410a5d0e..5492a9c13 100644 --- a/lib/mempool/mempool.js +++ b/lib/mempool/mempool.js @@ -27,6 +27,7 @@ const TXMeta = require('../primitives/txmeta'); const MempoolEntry = require('./mempoolentry'); const Network = require('../protocol/network'); const layout = require('./layout'); +const 
AddrIndexer = require('./addrindexer'); const Fees = require('./fees'); const CoinView = require('../coins/coinview'); @@ -72,7 +73,7 @@ class Mempool extends EventEmitter { this.spents = new BufferMap(); this.rejects = new RollingFilter(120000, 0.000001); - this.addrindex = new AddrIndex(); + this.addrindex = new AddrIndexer(); } /** @@ -2118,216 +2119,6 @@ class MempoolOptions { } } -/** - * Address Index - * @ignore - */ - -class AddrIndex { - /** - * Create TX address index. - * @constructor - */ - - constructor() { - // Map of addr->entries. - this.index = new BufferMap(); - - // Map of txid->addrs. - this.map = new BufferMap(); - } - - reset() { - this.index.clear(); - this.map.clear(); - } - - getKey(addr) { - const prefix = addr.getPrefix(); - - if (prefix < 0) - return null; - - const raw = Buffer.allocUnsafe(1); - raw.writeUInt8(prefix); - - return Buffer.concat([raw, addr.getHash()]); - } - - /** - * Get transactions by address. - * @param {Address} addr - * @param {Object} options - * @param {Number} options.limit - * @param {Number} options.reverse - * @param {Buffer} options.after - */ - - get(addr, options = {}) { - const values = this.getEntries(addr, options); - - const out = []; - - for (const entry of values) - out.push(entry.tx); - - return out; - } - - /** - * Get transaction meta by address. - * @param {Address} addr - * @param {Object} options - * @param {Number} options.limit - * @param {Number} options.reverse - * @param {Buffer} options.after - */ - - getMeta(addr, options = {}) { - const values = this.getEntries(addr, options); - - const out = []; - - for (const entry of values) { - const meta = TXMeta.fromTX(entry.tx); - meta.mtime = entry.time; - out.push(meta); - } - - return out; - } - - /** - * Get entries by address. 
- * @param {Address} addr - * @param {Object} options - * @param {Number} options.limit - * @param {Number} options.reverse - * @param {Buffer} options.after - */ - - getEntries(addr, options = {}) { - const {limit, reverse, after} = options; - const key = this.getKey(addr); - - if (!key) - return []; - - const items = this.index.get(key); - - if (!items) - return []; - - let values = []; - - const skip = (after && !items.has(after) && reverse); - - if (skip) - return values; - - if (after && items.has(after)) { - let index = 0; - - for (const k of items.keys()) { - if (k.compare(after) === 0) - break; - index += 1; - } - - values = Array.from(items.values()); - - let start = index + 1; - let end = values.length; - - if (end - start > limit) - end = start + limit; - - if (reverse) { - start = 0; - end = index; - - if (end - start > limit) - start = end - limit; - } - - values = values.slice(start, end); - } else { - values = Array.from(items.values()); - - if (values.length > limit) { - let start = 0; - let end = limit; - - if (reverse) { - start = values.length - limit; - end = values.length; - } - - values = values.slice(start, end); - } - } - - if (reverse) - values.reverse(); - - return values; - } - - insert(entry, view) { - const tx = entry.tx; - const hash = tx.hash(); - const addrs = tx.getAddresses(view); - - if (addrs.length === 0) - return; - - for (const addr of addrs) { - const key = this.getKey(addr); - - if (!key) - continue; - - let items = this.index.get(key); - - if (!items) { - items = new BufferMap(); - this.index.set(key, items); - } - - assert(!items.has(hash)); - items.set(hash, entry); - } - - this.map.set(hash, addrs); - } - - remove(hash) { - const addrs = this.map.get(hash); - - if (!addrs) - return; - - for (const addr of addrs) { - const key = this.getKey(addr); - - if (!key) - continue; - - const items = this.index.get(key); - - assert(items); - assert(items.has(hash)); - - items.delete(hash); - - if (items.size === 0) - 
this.index.delete(key); - } - - this.map.delete(hash); - } -} - /** * Orphan * @ignore diff --git a/test/indexer-test.js b/test/indexer-test.js index e26003027..7249a1605 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -5,6 +5,9 @@ const assert = require('./util/assert'); const reorg = require('./util/reorg'); +const Script = require('../lib/script/script'); +const Opcode = require('../lib/script/opcode'); +const Address = require('../lib/primitives/address'); const Chain = require('../lib/blockchain/chain'); const WorkerPool = require('../lib/workers/workerpool'); const Miner = require('../lib/mining/miner'); @@ -18,6 +21,34 @@ const network = Network.get('regtest'); const {NodeClient, WalletClient} = require('bclient'); const {forValue} = require('./util/common'); +const vectors = [ + // Secret for the public key vectors: + // cVDJUtDjdaM25yNVVDLLX3hcHUfth4c7tY3rSc4hy9e8ibtCuj6G + { + addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', + amount: 19.99, + label: 'p2wpkh' + }, + { + addr: 'muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi', + amount: 1.99, + label: 'p2pkh' + }, + // Secrets for 1 of 2 multisig vectors: + // cVDJUtDjdaM25yNVVDLLX3hcHUfth4c7tY3rSc4hy9e8ibtCuj6G + // 93KCDD4LdP4BDTNBXrvKUCVES2jo9dAKKvhyWpNEMstuxDauHty + { + addr: 'bcrt1q2nj8e2nhmsa4hl9qw3xas7l5n2547h5uhlj47nc3pqfxaeq5rtjs9g328g', + amount: 0.99, + label: 'p2wsh' + }, + { + addr: '2Muy8nSQaMsMFAZwPyiXSEMTVFJv9iYuhwT', + amount: 0.11, + label: 'p2sh' + } +]; + const workers = new WorkerPool({ enabled: true }); @@ -79,7 +110,89 @@ describe('Indexer', function() { await addrindexer.close(); }); - describe('index 10 blocks', function() { + describe('Unit', function() { + it('should not index transaction w/ invalid address', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {} + }); + + const ops = []; + + indexer.put = (key, value) => ops.push([key, value]); + indexer.del = (key, value) => ops.push([key, value]); + + // Create a witness program version 1 with + // 
40 byte data push. + const script = new Script(); + script.push(Opcode.fromSmall(1)); + script.push(Opcode.fromData(Buffer.alloc(40))); + script.compile(); + const addr = Address.fromScript(script); + + const tx = { + getAddresses: () => [addr], + hash: () => Buffer.alloc(32) + }; + + const entry = {height: 323549}; + const block = {txs: [tx]}; + const view = {}; + + indexer.indexBlock(entry, block, view); + indexer.unindexBlock(entry, block, view); + + assert.equal(ops.length, 0); + }); + + it('should index transaction w/ valid address', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {} + }); + + const ops = []; + + indexer.put = (key, value) => ops.push([key, value]); + indexer.del = (key, value) => ops.push([key, value]); + + // Create a witness program version 0 with + // 20 byte data push. + const script = new Script(); + script.push(Opcode.fromSmall(0)); + script.push(Opcode.fromData(Buffer.alloc(20))); + script.compile(); + const addr = Address.fromScript(script); + + const tx = { + getAddresses: () => [addr], + hash: () => Buffer.alloc(32) + }; + + const entry = {height: 323549}; + const block = {txs: [tx]}; + const view = {}; + + indexer.indexBlock(entry, block, view); + indexer.unindexBlock(entry, block, view); + + assert.equal(ops.length, 6); + }); + + it('should error with limits', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {}, + maxTxs: 10 + }); + + await assert.asyncThrows(async () => { + await indexer.getHashesByAddress(vectors[0].addr, {limit: 11}); + }, 'Limit above max'); + }); + }); + + describe('Index 10 blocks', function() { let addr = null; before(async () => { @@ -189,7 +302,7 @@ describe('Indexer', function() { }); }); - describe('rescan and reorg', function() { + describe('Rescan and reorg', function() { it('should rescan and reindex 10 missed blocks', async () => { for (let i = 0; i < 10; i++) { const block = await cpu.mineBlock(); @@ -226,26 +339,11 @@ describe('Indexer', function() 
{ }); }); - describe('http', function() { + describe('HTTP', function() { this.timeout(120000); let node, nclient, wclient = null; - const vectors = [ - // Secret for the vectors: - // cVDJUtDjdaM25yNVVDLLX3hcHUfth4c7tY3rSc4hy9e8ibtCuj6G - { - addr: 'bcrt1qngw83fg8dz0k749cg7k3emc7v98wy0c7azaa6h', - amount: 19.99, - label: 'p2wpkh' - }, - { - addr: 'muZpTpBYhxmRFuCjLc7C6BBDF32C8XVJUi', - amount: 1.99, - label: 'p2pkh' - } - ]; - const confirmed = []; const unconfirmed = []; @@ -255,6 +353,15 @@ describe('Indexer', function() { wallet: 49333 }; + function sanitize(txs) { + return txs.map((tx) => { + // Remove mtime from the results for deep + // comparisons as it can be variable. + delete tx.mtime; + return tx; + }); + } + before(async () => { this.timeout(120000); @@ -326,7 +433,7 @@ describe('Indexer', function() { await forValue(node.chain, 'height', 160); // Send unconfirmed to the vector addresses. - for (let i = 0; i < 3; i++) { + for (let i = 0; i < 5; i++) { for (const v of vectors) { const txid = await wclient.execute( 'sendtoaddress', [v.addr, v.amount]); @@ -335,7 +442,7 @@ describe('Indexer', function() { } } - await forValue(node.mempool.map, 'size', 6); + await forValue(node.mempool.map, 'size', 20); }); after(async () => { @@ -345,20 +452,20 @@ describe('Indexer', function() { }); for (const v of vectors) { - it(`txs by ${v.label} address`, async () => { + it(`txs by ${v.label} addr`, async () => { const res = await nclient.request( 'GET', `/tx/address/${v.addr}`, {}); - assert.equal(res.length, 13); + assert.equal(res.length, 15); for (let i = 0; i < 10; i++) assert(confirmed.includes(res[i].hash)); - for (let i = 10; i < 13; i++) + for (let i = 10; i < 15; i++) assert(unconfirmed.includes(res[i].hash)); }); - it(`txs by ${v.label} address (limit)`, async () => { + it(`txs by ${v.label} addr (limit)`, async () => { const res = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 3}); @@ -368,7 +475,7 @@ describe('Indexer', function() { 
assert(confirmed.includes(tx.hash)); }); - it(`txs by ${v.label} address (limit w/ unconf)`, async () => { + it(`txs by ${v.label} addr (limit w/ unconf)`, async () => { const res = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 11}); @@ -381,21 +488,21 @@ describe('Indexer', function() { assert(unconfirmed.includes(res[i].hash)); }); - it(`txs by ${v.label} address (reverse)`, async () => { + it(`txs by ${v.label} addr (reverse)`, async () => { const asc = await nclient.request( 'GET', `/tx/address/${v.addr}`, {reverse: false}); - assert.equal(asc.length, 13); + assert.equal(asc.length, 15); const dsc = await nclient.request( 'GET', `/tx/address/${v.addr}`, {reverse: true}); - assert.equal(asc.length, 13); + assert.equal(dsc.length, 15); for (let i = 0; i < 10; i++) assert(confirmed.includes(asc[i].hash)); - for (let i = 10; i < 13; i++) + for (let i = 10; i < 15; i++) assert(unconfirmed.includes(asc[i].hash)); // Check the the results are reverse @@ -407,11 +514,16 @@ describe('Indexer', function() { } }); - it(`txs by ${v.label} address (after)`, async () => { + it(`txs by ${v.label} addr (after)`, async () => { const one = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 3}); assert.strictEqual(one.length, 3); + for (let i = 0; i < 3; i++) + assert(confirmed.includes(one[i].hash)); + + // The after hash is within the + // confirmed transactions. 
const hash = one[2].hash; const two = await nclient.request( @@ -422,10 +534,10 @@ describe('Indexer', function() { 'GET', `/tx/address/${v.addr}`, {limit: 6}); assert.strictEqual(one.length, 3); - assert.deepEqual(one.concat(two), all); + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); }); - it(`txs by ${v.label} address (after w/ unconf)`, async () => { + it(`txs by ${v.label} addr (after w/ unconf)`, async () => { const one = await nclient.request( 'GET', `/tx/address/${v.addr}`, {limit: 11}); assert.strictEqual(one.length, 11); @@ -449,84 +561,191 @@ describe('Indexer', function() { 'GET', `/tx/address/${v.addr}`, {limit: 12}); assert.strictEqual(all.length, 12); - assert.deepEqual(one.concat(two), all); + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); }); - it(`txs by ${v.label} address (after, reverse)`, async () => { + it(`txs by ${v.label} addr (after w/ unconf 2)`, async () => { const one = await nclient.request( - 'GET', `/tx/address/${v.addr}`, - {limit: 5, reverse: true}); + 'GET', `/tx/address/${v.addr}`, {limit: 12}); + assert.strictEqual(one.length, 12); - assert.strictEqual(one.length, 5); + for (let i = 0; i < 10; i++) + assert(confirmed.includes(one[i].hash)); + + for (let i = 10; i < 12; i++) + assert(unconfirmed.includes(one[i].hash)); + + const hash = one[11].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 10}); + assert.strictEqual(two.length, 3); for (let i = 0; i < 3; i++) + assert(unconfirmed.includes(two[i].hash)); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 100}); + assert.strictEqual(all.length, 15); + + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); + }); + + it(`txs by ${v.label} addr (after w/ unconf 3)`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 13}); + assert.strictEqual(one.length, 13); + + for (let i = 0; i < 10; i++) + 
assert(confirmed.includes(one[i].hash)); + + for (let i = 10; i < 13; i++) assert(unconfirmed.includes(one[i].hash)); - for (let i = 3; i < 5; i++) + const hash = one[12].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 1}); + assert.strictEqual(two.length, 1); + assert(unconfirmed.includes(two[0].hash)); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, {limit: 14}); + assert.strictEqual(all.length, 14); + + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); + }); + + it(`txs by ${v.label} addr (after, reverse)`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 8, reverse: true}); + + assert.strictEqual(one.length, 8); + + for (let i = 0; i < 5; i++) + assert(unconfirmed.includes(one[i].hash)); + + for (let i = 5; i < 8; i++) assert(confirmed.includes(one[i].hash)); // The after hash is within the // confirmed transactions. - const hash = one[4].hash; + const hash = one[7].hash; const two = await nclient.request( 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 3, reverse: true}); assert.strictEqual(two.length, 3); + for (let i = 0; i < 3; i++) assert(confirmed.includes(two[i].hash)); const all = await nclient.request( 'GET', `/tx/address/${v.addr}`, - {limit: 8, reverse: true}); + {limit: 11, reverse: true}); - assert.strictEqual(all.length, 8); + assert.strictEqual(all.length, 11); - for (let i = 0; i < 3; i++) + for (let i = 0; i < 5; i++) assert(unconfirmed.includes(all[i].hash)); - for (let i = 3; i < 8; i++) + for (let i = 5; i < 11; i++) assert(confirmed.includes(all[i].hash)); - assert.deepEqual(one.concat(two), all); + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); }); - it(`txs by ${v.label} address (after, reverse w/ unconf)`, async () => { + it(`txs by ${v.label} addr (after, reverse w/ unconf)`, async () => { const one = await nclient.request( 'GET', `/tx/address/${v.addr}`, - {limit: 3, reverse: true}); + 
{limit: 5, reverse: true}); - assert.strictEqual(one.length, 3); - for (let i = 0; i < 3; i++) + assert.strictEqual(one.length, 5); + for (let i = 0; i < 5; i++) assert(unconfirmed.includes(one[i].hash)); // The after hash is within the // unconfirmed transactions. - const hash = one[2].hash; + const hash = one[4].hash; const two = await nclient.request( 'GET', `/tx/address/${v.addr}`, {after: hash, limit: 3, reverse: true}); assert.strictEqual(two.length, 3); + for (let i = 0; i < 3; i++) assert(confirmed.includes(two[i].hash)); const all = await nclient.request( 'GET', `/tx/address/${v.addr}`, - {limit: 6, reverse: true}); + {limit: 8, reverse: true}); - assert.strictEqual(all.length, 6); + assert.strictEqual(all.length, 8); - for (let i = 0; i < 3; i++) + for (let i = 0; i < 5; i++) assert(unconfirmed.includes(all[i].hash)); - for (let i = 3; i < 6; i++) + for (let i = 5; i < 8; i++) assert(confirmed.includes(all[i].hash)); - assert.deepEqual(one.concat(two), all); + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); + }); + + it(`txs by ${v.label} addr (after, reverse w/ unconf 2)`, async () => { + const one = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 3, reverse: true}); + + assert.strictEqual(one.length, 3); + for (let i = 0; i < 3; i++) + assert(unconfirmed.includes(one[i].hash)); + + const hash = one[2].hash; + + const two = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {after: hash, limit: 1, reverse: true}); + + assert.strictEqual(two.length, 1); + assert(unconfirmed.includes(two[0].hash)); + + const all = await nclient.request( + 'GET', `/tx/address/${v.addr}`, + {limit: 4, reverse: true}); + + assert.strictEqual(all.length, 4); + + for (let i = 0; i < 4; i++) + assert(unconfirmed.includes(all[i].hash)); + + assert.deepEqual(sanitize(one.concat(two)), sanitize(all)); }); } + + describe('Errors', function() { + it('will give error if limit is exceeded', async () => { + await assert.asyncThrows(async () => { + 
await nclient.request( + 'GET', `/tx/address/${vectors[0].addr}`, {limit: 101}); + }, 'Limit above max'); + }); + + it('will give error with invalid after hash', async () => { + await assert.asyncThrows(async () => { + await nclient.request( + 'GET', `/tx/address/${vectors[0].addr}`, {after: 'deadbeef'}); + }); + }); + + it('will give error with invalid reverse', async () => { + await assert.asyncThrows(async () => { + await nclient.request( + 'GET', `/tx/address/${vectors[0].addr}`, {reverse: 'sure'}); + }); + }); + }); }); }); diff --git a/test/mempool-test.js b/test/mempool-test.js index 0f02c0237..dbe19b377 100644 --- a/test/mempool-test.js +++ b/test/mempool-test.js @@ -9,6 +9,7 @@ const common = require('../lib/blockchain/common'); const Block = require('../lib/primitives/block'); const MempoolEntry = require('../lib/mempool/mempoolentry'); const Mempool = require('../lib/mempool/mempool'); +const AddrIndexer = require('../lib/mempool/addrindexer'); const WorkerPool = require('../lib/workers/workerpool'); const Chain = require('../lib/blockchain/chain'); const MTX = require('../lib/primitives/mtx'); @@ -18,6 +19,7 @@ const Address = require('../lib/primitives/address'); const Outpoint = require('../lib/primitives/outpoint'); const Input = require('../lib/primitives/input'); const Script = require('../lib/script/script'); +const Opcode = require('../lib/script/opcode'); const opcodes = Script.opcodes; const Witness = require('../lib/script/witness'); const MemWallet = require('./util/memwallet'); @@ -780,6 +782,40 @@ describe('Mempool', function() { }); }); + describe('AddrIndexer', function () { + it('will not get key for witness program v1', function() { + const addrindex = new AddrIndexer(); + + // Create a witness program version 1 with + // 40 byte data push. 
+ const script = new Script(); + script.push(Opcode.fromSmall(1)); + script.push(Opcode.fromData(Buffer.alloc(40))); + script.compile(); + const addr = Address.fromScript(script); + + const key = addrindex.getKey(addr); + + assert.strictEqual(key, null); + }); + + it('will get key for witness program v0', function() { + const addrindex = new AddrIndexer(); + + // Create a witness program version 0 with + // 32 byte data push. + const script = new Script(); + script.push(Opcode.fromSmall(0)); + script.push(Opcode.fromData(Buffer.alloc(32))); + script.compile(); + const addr = Address.fromScript(script); + + const key = addrindex.getKey(addr); + + assert.bufferEqual(key, Buffer.from('0a' + '00'.repeat(32), 'hex')); + }); + }); + describe('Mempool persistent cache', function () { const workers = new WorkerPool({ enabled: true diff --git a/test/util/assert.js b/test/util/assert.js index e1bdcb3ea..772a52704 100644 --- a/test/util/assert.js +++ b/test/util/assert.js @@ -115,8 +115,11 @@ assert.asyncThrows = async function asyncThrows(func, expectedError) { } catch (e) { err = e; } - const re = new RegExp('^' + expectedError); - assert(re.test(err.message)); + assert(err, 'Expected error.'); + if (expectedError) { + const re = new RegExp('^' + expectedError); + assert(re.test(err.message), err.message); + } }; function _isString(value, message, stackStartFunction) { From 865f7401ba7d9aff9bc1c7c93a52fc70f5f23a93 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 18 Apr 2019 17:49:31 -0700 Subject: [PATCH 27/40] test: add test for retroactively enabling indexer --- test/indexer-test.js | 102 +++++++++++++++++++++++++++++++++++++++---- 1 file changed, 94 insertions(+), 8 deletions(-) diff --git a/test/indexer-test.js b/test/indexer-test.js index 7249a1605..019faf58b 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -19,7 +19,13 @@ const FullNode = require('../lib/node/fullnode'); const Network = require('../lib/protocol/network'); const network = 
Network.get('regtest'); const {NodeClient, WalletClient} = require('bclient'); -const {forValue} = require('./util/common'); +const {forValue, testdir, rimraf} = require('./util/common'); + +const ports = { + p2p: 49331, + node: 49332, + wallet: 49333 +}; const vectors = [ // Secret for the public key vectors: @@ -302,7 +308,7 @@ describe('Indexer', function() { }); }); - describe('Rescan and reorg', function() { + describe('Reorg and rescan', function() { it('should rescan and reindex 10 missed blocks', async () => { for (let i = 0; i < 10; i++) { const block = await cpu.mineBlock(); @@ -337,6 +343,92 @@ describe('Indexer', function() { assert.bufferEqual(meta.tx.hash(), hash); } }); + + describe('Integration', function() { + const prefix = testdir('indexer'); + + beforeEach(async () => { + await rimraf(prefix); + }); + + after(async () => { + await rimraf(prefix); + }); + + it('will enable indexes retroactively', async () => { + let node, nclient = null; + + try { + node = new FullNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + indexTX: false, + indexAddress: false, + port: ports.p2p, + httpPort: ports.node + }); + + await node.ensure(); + await node.open(); + + nclient = new NodeClient({ + port: ports.node, + apiKey: 'foo', + timeout: 120000 + }); + + await nclient.open(); + + const blocks = await nclient.execute( + 'generatetoaddress', [150, vectors[0].addr]); + + assert.equal(blocks.length, 150); + + await forValue(node.chain, 'height', 150); + + const info = await nclient.request('GET', '/'); + + assert.equal(info.chain.height, 150); + assert.equal(info.indexes.addr.enabled, false); + assert.equal(info.indexes.addr.height, 0); + assert.equal(info.indexes.tx.enabled, false); + assert.equal(info.indexes.tx.height, 0); + } finally { + if (nclient) + await nclient.close(); + + if (node) + await node.close(); + } + + try { + node = new FullNode({ + prefix: prefix, + network: 'regtest', + memory: false, + indexTX: true, + 
indexAddress: false, + port: ports.p2p, + httpPort: ports.node + }); + + await node.ensure(); + await node.open(); + + assert(node.txindex); + assert.equal(node.txindex.height, 0); + + node.txindex.sync(); + + await forValue(node.txindex, 'height', 150); + } finally { + if (node) + await node.close(); + } + }); + }); }); describe('HTTP', function() { @@ -347,12 +439,6 @@ describe('Indexer', function() { const confirmed = []; const unconfirmed = []; - const ports = { - p2p: 49331, - node: 49332, - wallet: 49333 - }; - function sanitize(txs) { return txs.map((tx) => { // Remove mtime from the results for deep From efb25515558b0265582085382a2752e9c3a0adc4 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Fri, 19 Apr 2019 14:08:27 -0700 Subject: [PATCH 28/40] indexer: fix reset and reorg handling --- lib/blockchain/chaindb.js | 28 +----- lib/indexer/addrindexer.js | 12 +-- lib/indexer/indexer.js | 163 +++++++++++++++---------------- lib/indexer/txindexer.js | 18 ++-- lib/net/pool.js | 8 +- test/indexer-test.js | 195 +++++++++++++++++++++++++++++++++++++ test/util/common.js | 2 +- 7 files changed, 291 insertions(+), 135 deletions(-) diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 2ca1448ad..184afbf12 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -1392,10 +1392,6 @@ class ChainDB { await this.commit(); - // Remove undo data _after_ successful commit. - if (this.blocks) - await this.blocks.pruneUndo(entry.hash); - return view; } @@ -1512,12 +1508,6 @@ class ChainDB { await this.commit(); - // Remove block and undo data _after_ successful commit. - if (this.blocks) { - await this.blocks.pruneUndo(tip.hash); - await this.blocks.prune(tip.hash); - } - // Update caches _after_ successful commit. this.cacheHeight.remove(tip.height); this.cacheHash.remove(tip.hash); @@ -1541,23 +1531,15 @@ class ChainDB { // one giant atomic write! 
this.start(); - let hashes = []; - try { for (const tip of tips) - hashes = hashes.concat(await this._removeChain(tip)); + await this._removeChain(tip); } catch (e) { this.drop(); throw e; } await this.commit(); - - // SPV doesn't store blocks. - if (this.blocks) { - for (const hash of hashes) - await this.blocks.prune(hash); - } } /** @@ -1575,8 +1557,6 @@ class ChainDB { this.logger.debug('Removing alternate chain: %h.', tip.hash); - const hashes = []; - for (;;) { if (await this.isMainChain(tip)) break; @@ -1588,10 +1568,6 @@ class ChainDB { this.del(layout.h.encode(tip.hash)); this.del(layout.e.encode(tip.hash)); - // Queue block to be pruned on - // successful write. - hashes.push(tip.hash); - // Queue up hash to be removed // on successful write. this.cacheHash.unpush(tip.hash); @@ -1599,8 +1575,6 @@ class ChainDB { tip = await this.getPrevious(tip); assert(tip); } - - return hashes; } /** diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index 192581eff..cdcd78dd5 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -125,13 +125,13 @@ class AddrIndexer extends Indexer { /** * Index transactions by address. * @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block * @param {CoinView} view */ - async indexBlock(entry, block, view) { - const height = entry.height; + async indexBlock(meta, block, view) { + const height = meta.height; for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; @@ -163,13 +163,13 @@ class AddrIndexer extends Indexer { /** * Remove addresses from index. 
* @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block * @param {CoinView} view */ - async unindexBlock(entry, block, view) { - const height = entry.height; + async unindexBlock(meta, block, view) { + const height = meta.height; for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 7c092cf24..d16f8dc90 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -166,29 +166,19 @@ class Indexer extends EventEmitter { */ bind() { - this.chain.on('connect', async (entry, block, view) => { - try { - await this.sync(entry, block, view); - } catch (e) { - this.emit('error', e); - } - }); + const listener = async (entry, block, view) => { + const meta = new BlockMeta(entry.hash, entry.height); - this.chain.on('disconnect', async (entry, block, view) => { try { - await this.sync(entry, block, view); + await this.sync(meta, block, view); } catch (e) { this.emit('error', e); } - }); + }; - this.chain.on('reset', async (tip) => { - try { - await this.sync(tip); - } catch (e) { - this.emit('error', e); - } - }); + this.chain.on('connect', listener); + this.chain.on('disconnect', listener); + this.chain.on('reset', listener); } /** @@ -226,26 +216,31 @@ class Indexer extends EventEmitter { /** * Sync with the chain. 
- * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block * @param {CoinView} view * @returns {Promise} */ - async sync(entry, block, view) { + async sync(meta, block, view) { if (this.syncing) return; this.syncing = true; - const connected = await this._syncBlock(entry, block, view); + const connected = await this._syncBlock(meta, block, view); if (connected) { this.syncing = false; } else { (async () => { - await this._syncChain(entry); - this.syncing = false; + try { + await this._syncChain(); + } catch (e) { + this.emit('error', e); + } finally { + this.syncing = false; + } })(); } } @@ -253,23 +248,23 @@ class Indexer extends EventEmitter { /** * Sync with the chain with a block. * @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block * @param {CoinView} view * @returns {Promise} */ - async _syncBlock(entry, block, view) { + async _syncBlock(meta, block, view) { // In the case that the next block is being // connected or the current block disconnected // use the block and view being passed directly, // instead of reading that information again. - if (entry && block && view) { - if (entry.height === this.height + 1) { - await this._addBlock(entry, block, view); + if (meta && block && view) { + if (meta.height === this.height + 1) { + await this._addBlock(meta, block, view); return true; - } else if (entry.height === this.height) { - await this._removeBlock(entry, block, view); + } else if (meta.height === this.height) { + await this._removeBlock(meta, block, view); return true; } } @@ -279,11 +274,10 @@ class Indexer extends EventEmitter { /** * Sync with the chain. * @private - * @param {ChainEntry} entry * @returns {Promise} */ - async _syncChain(entry) { + async _syncChain() { let height = this.height; // In the case that the indexer has never @@ -297,23 +291,16 @@ class Indexer extends EventEmitter { // leave chain in a different state. // Scan chain backwards until we // find a common height. 
- for (;;) { - const tip = await this.getBlockMeta(height); - assert(tip); + while (height > 0) { + const meta = await this.getBlockMeta(height); + assert(meta); - if (await this.getEntry(tip.hash)) + if (await this.getEntry(meta.hash)) break; - assert(height !== 0); height -= 1; } - // In the case that the chain is reset - // the entry will be less than the - // current height. - if (entry && entry.height < height) - height = entry.height; - if (height < this.height) { await this._rollback(height); await this._rollforward(); @@ -329,20 +316,22 @@ class Indexer extends EventEmitter { */ async _rollforward() { - this.logger.info('Indexing to best height.'); + this.logger.info('Indexing to best height from height (%d).', this.height); - for (let i = this.height + 1; ; i++) { - const entry = await this.getEntry(i); + for (let height = this.height + 1; ; height++) { + const entry = await this.getEntry(height); if (!entry) break; + const meta = new BlockMeta(entry.hash, height); + const block = await this.chain.getBlock(entry.hash); assert(block); const view = await this.chain.getBlockView(block); assert(view); - await this._addBlock(entry, block, view); + await this._addBlock(meta, block, view); } } @@ -362,50 +351,46 @@ class Indexer extends EventEmitter { this.logger.info('Rolling back to height %d.', height); - while (this.height > height) { - const tip = await this.getBlockMeta(this.height); - assert(tip); + while (this.height > height && this.height > 1) { + const meta = await this.getBlockMeta(this.height); + assert(meta); - const entry = await this.chain.getEntry(tip.hash); - assert(entry); - - const block = await this.chain.getBlock(entry.hash); + const block = await this.chain.getBlock(meta.hash); assert(block); const view = await this.chain.getBlockView(block); assert(view); - await this._removeBlock(entry, block, view); + await this._removeBlock(meta, block, view); } } /** * Add a block's transactions without a lock. 
* @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block + * @param {CoinView} view * @returns {Promise} */ - async _addBlock(entry, block, view) { + async _addBlock(meta, block, view) { assert(block.hasRaw(), 'Expected raw data for block.'); const start = util.bench(); - if (entry.height !== this.height + 1) + if (meta.height !== this.height + 1) throw new Error('Indexer: Can not add block.'); - const tip = new BlockMeta(entry.hash, entry.height); - // Start the batch write. this.start(); // Call the implemented indexer to add to // the batch write. - await this.indexBlock(entry, block, view); + await this.indexBlock(meta, block, view); // Sync the height to the new tip. - const height = await this._setTip(tip); + const height = await this._setTip(meta); // Commit the write batch to disk. await this.commit(); @@ -414,56 +399,58 @@ class Indexer extends EventEmitter { this.height = height; // Log the current indexer status. - this.logStatus(start, block, entry); + this.logStatus(start, block, meta); } /** * Process block indexing * Indexers will implement this method to process the block for indexing - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block + * @param {CoinView} view * @returns {Promise} */ - async indexBlock(entry, block, view) { + async indexBlock(meta, block, view) { ; } /** * Undo block indexing * Indexers will implement this method to undo indexing for the block - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block + * @param {CoinView} view * @returns {Promise} */ - async unindexBlock(entry, block, view) { + async unindexBlock(meta, block, view) { ; } /** * Unconfirm a block's transactions. 
* @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta + * @param {Block} block + * @param {CoinView} view * @returns {Promise} */ - async _removeBlock(entry, block, view) { + async _removeBlock(meta, block, view) { const start = util.bench(); - if (entry.height !== this.height) + if (meta.height !== this.height) throw new Error('Indexer: Can not remove block.'); - const tip = new BlockMeta(entry.hash, entry.height); - // Start the batch write. this.start(); // Call the implemented indexer to add to // the batch write. - await this.unindexBlock(entry, block, view); + await this.unindexBlock(meta, block, view); - const prev = await this.getBlockMeta(tip.height - 1); + const prev = await this.getBlockMeta(meta.height - 1); assert(prev); // Sync the height to the previous tip. @@ -476,7 +463,7 @@ class Indexer extends EventEmitter { this.height = height; // Log the current indexer status. - this.logStatus(start, block, entry); + this.logStatus(start, block, meta, true); } /** @@ -485,21 +472,21 @@ class Indexer extends EventEmitter { * @returns {Promise} */ - async _setTip(tip) { - if (tip.height < this.height) { - assert(tip.height === this.height - 1); + async _setTip(meta) { + if (meta.height < this.height) { + assert(meta.height === this.height - 1); this.del(layout.h.encode(this.height)); - } else if (tip.height > this.height) { - assert(tip.height === this.height + 1); + } else if (meta.height > this.height) { + assert(meta.height === this.height + 1); } // Add to batch write to save tip and height. 
- this.put(layout.h.encode(tip.height), tip.hash); + this.put(layout.h.encode(meta.height), meta.hash); - const raw = bio.write(4).writeU32(tip.height).render(); + const raw = bio.write(4).writeU32(meta.height).render(); this.put(layout.R.encode(), raw); - return tip.height; + return meta.height; } /** @@ -523,18 +510,22 @@ class Indexer extends EventEmitter { * @private * @param {Array} start * @param {Block} block - * @param {ChainEntry} entry + * @param {BlockMeta} meta + * @param {Boolean} reverse */ - logStatus(start, block, entry) { + logStatus(start, block, meta, reverse) { if (!this.isSlow()) return; const elapsed = util.bench(start); + const msg = reverse ? 'removed from' : 'added to'; + this.logger.info( - 'Block (%d) added to indexer (txs=%d time=%d).', - entry.height, + 'Block (%d) %s indexer (txs=%d time=%d).', + meta.height, + msg, block.txs.length, elapsed); } diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index 4c1d5eb09..64a5f3acf 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -180,18 +180,18 @@ class TXIndexer extends Indexer { /** * Index transactions by txid. * @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block * @param {CoinView} view */ - async indexBlock(entry, block, view) { + async indexBlock(meta, block, view) { const brecord = new BlockRecord({ - block: entry.hash, - time: entry.time + block: meta.hash, + time: block.time }); - this.put(layout.b.encode(entry.height), brecord.toRaw()); + this.put(layout.b.encode(meta.height), brecord.toRaw()); for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; @@ -200,7 +200,7 @@ class TXIndexer extends Indexer { const {offset, size} = tx.getPosition(); const txrecord = new TxRecord({ - height: entry.height, + height: meta.height, index: i, offset: offset, length: size @@ -213,13 +213,13 @@ class TXIndexer extends Indexer { /** * Remove transactions from index. 
* @private - * @param {ChainEntry} entry + * @param {BlockMeta} meta * @param {Block} block * @param {CoinView} view */ - async unindexBlock(entry, block, view) { - this.del(layout.b.encode(entry.height)); + async unindexBlock(meta, block, view) { + this.del(layout.b.encode(meta.height)); for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; diff --git a/lib/net/pool.js b/lib/net/pool.js index 82d31c9cc..834fe8aa8 100644 --- a/lib/net/pool.js +++ b/lib/net/pool.js @@ -643,11 +643,9 @@ class Pool extends EventEmitter { */ startSync() { - if (!this.opened) + if (!this.opened || !this.connected) return; - assert(this.connected, 'Pool is not connected!'); - this.syncing = true; this.resync(false); } @@ -657,11 +655,9 @@ class Pool extends EventEmitter { */ forceSync() { - if (!this.opened) + if (!this.opened || !this.connected) return; - assert(this.connected, 'Pool is not connected!'); - this.resync(true); } diff --git a/test/indexer-test.js b/test/indexer-test.js index 019faf58b..021f8100f 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -420,10 +420,205 @@ describe('Indexer', function() { assert(node.txindex); assert.equal(node.txindex.height, 0); + node.startSync(); + + await forValue(node.txindex, 'height', 150); + } finally { + if (node) + await node.close(); + } + }); + + it('will sync if disabled during reorganization', async () => { + let node, nclient, wclient = null; + + try { + // Generate initial set of blocks that are are spending + // coins and therefore data in undo blocks. 
+ node = new FullNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + indexTX: true, + indexAddress: false, + port: ports.p2p, + httpPort: ports.node, + plugins: [require('../lib/wallet/plugin')], + env: { + 'BCOIN_WALLET_HTTP_PORT': ports.wallet.toString() + }, + logLevel: 'none' + }); + + await node.ensure(); + await node.open(); + + nclient = new NodeClient({ + port: ports.node, + apiKey: 'foo', + timeout: 120000 + }); + + await nclient.open(); + + wclient = new WalletClient({ + port: ports.wallet, + apiKey: 'foo', + timeout: 120000 + }); + + await wclient.open(); + + const coinbase = await wclient.execute( + 'getnewaddress', ['default']); + + const blocks = await nclient.execute( + 'generatetoaddress', [150, coinbase]); + + assert.equal(blocks.length, 150); + + for (let i = 0; i < 10; i++) { + for (const v of vectors) + await wclient.execute('sendtoaddress', [v.addr, v.amount]); + + const blocks = await nclient.execute( + 'generatetoaddress', [1, coinbase]); + + assert.equal(blocks.length, 1); + } + + await forValue(node.chain, 'height', 160); + await forValue(node.txindex, 'height', 160); + } finally { + if (wclient) + await wclient.close(); + + if (nclient) + await nclient.close(); + + if (node) + await node.close(); + } + + try { + // Now create a reorganization in the chain while + // the indexer is disabled. 
+ node = new FullNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + indexTX: false, + indexAddress: false, + port: ports.p2p, + httpPort: ports.node, + logLevel: 'none' + }); + + await node.ensure(); + await node.open(); + + nclient = new NodeClient({ + port: ports.node, + apiKey: 'foo', + timeout: 120000 + }); + + await nclient.open(); + + for (let i = 0; i < 10; i++) { + const hash = await nclient.execute('getbestblockhash'); + await nclient.execute('invalidateblock', [hash]); + } + + await forValue(node.chain, 'height', 150); + + const blocks = await nclient.execute( + 'generatetoaddress', [20, vectors[0].addr]); + + assert.equal(blocks.length, 20); + + await forValue(node.chain, 'height', 170); + } finally { + if (nclient) + await nclient.close(); + + if (node) + await node.close(); + } + + try { + // Now turn the indexer back on and check that it + // is able to disconnect blocks and add the new blocks. + node = new FullNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + indexTX: true, + indexAddress: false, + port: ports.p2p, + httpPort: ports.node, + logLevel: 'none' + }); + + await node.ensure(); + await node.open(); + + assert(node.txindex); + assert.equal(node.txindex.height, 160); + node.txindex.sync(); + await forValue(node.txindex, 'height', 170, 5000); + } finally { + if (node) + await node.close(); + } + }); + + it('will reset indexes', async () => { + let node, nclient = null; + + try { + node = new FullNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + indexTX: true, + indexAddress: false, + port: ports.p2p, + httpPort: ports.node, + logLevel: 'none' + }); + + await node.ensure(); + await node.open(); + + nclient = new NodeClient({ + port: ports.node, + apiKey: 'foo', + timeout: 120000 + }); + + await nclient.open(); + + const blocks = await nclient.execute( + 'generatetoaddress', [150, vectors[0].addr]); + + assert.equal(blocks.length, 150); + await 
forValue(node.txindex, 'height', 150); + + await node.chain.reset(0); + + await forValue(node.txindex, 'height', 1); } finally { + if (nclient) + await nclient.close(); + if (node) await node.close(); } diff --git a/test/util/common.js b/test/util/common.js index 3fd9f501a..047601295 100644 --- a/test/util/common.js +++ b/test/util/common.js @@ -102,7 +102,7 @@ common.rimraf = async function(p) { return await fs.rimraf(p); }; -common.forValue = async function(obj, key, val, timeout = 60000) { +common.forValue = async function(obj, key, val, timeout = 30000) { assert(typeof obj === 'object'); assert(typeof key === 'string'); From 5d18f9ba2e8a6554a33be2dc25c48f32abeb1261 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 23 Apr 2019 13:01:19 -0700 Subject: [PATCH 29/40] indexer: index the genesis block --- lib/indexer/indexer.js | 25 +++++++++++++++++++++++++ test/indexer-test.js | 16 ++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index d16f8dc90..5a6b51f46 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -15,6 +15,8 @@ const Logger = require('blgr'); const Network = require('../protocol/network'); const util = require('../utils/util'); const layout = require('./layout'); +const CoinView = require('../coins/coinview'); +const Block = require('../primitives/block'); const {ZERO_HASH} = require('../protocol/consensus'); /** @@ -111,6 +113,8 @@ class Indexer extends EventEmitter { const data = await this.db.get(layout.R.encode()); if (data) this.height = bio.readU32(data, 0); + else + await this.saveGenesis(); // Bind to chain events. this.bind(); @@ -160,6 +164,27 @@ class Indexer extends EventEmitter { throw new Error('Indexer: Network mismatch.'); } + /** + * A special case for indexing the genesis block. The genesis + * block coins are not spendable, however indexers can still index + * the block for historical and informational purposes. 
+ * @private + * @returns {Promise} + */ + + async saveGenesis() { + this.start(); + + const block = Block.fromRaw(Buffer.from(this.network.genesisBlock, 'hex')); + const meta = new BlockMeta(block.hash(), 0); + + await this.indexBlock(meta, block, new CoinView()); + await this._setTip(meta); + await this.commit(); + + this.height = 0; + } + /** * Bind to chain events. * @private diff --git a/test/indexer-test.js b/test/indexer-test.js index 021f8100f..699c48136 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -8,6 +8,7 @@ const reorg = require('./util/reorg'); const Script = require('../lib/script/script'); const Opcode = require('../lib/script/opcode'); const Address = require('../lib/primitives/address'); +const Block = require('../lib/primitives/block'); const Chain = require('../lib/blockchain/chain'); const WorkerPool = require('../lib/workers/workerpool'); const Miner = require('../lib/mining/miner'); @@ -306,6 +307,21 @@ describe('Indexer', function() { assert.strictEqual(tx, null); assert.strictEqual(meta, null); }); + + it('should get unspendable genesis tx', async () => { + const block = Block.fromRaw(Buffer.from(network.genesisBlock, 'hex')); + const hash = block.txs[0].hash(); + + const tx = await txindexer.getTX(hash); + const meta = await txindexer.getMeta(hash); + + assert(meta); + assert.equal(meta.height, 0); + assert(meta.block); + assert(meta.time); + + assert.deepEqual(meta.tx, tx); + }); }); describe('Reorg and rescan', function() { From cede31d86f7864abfafb90b2e5dab070d24457b3 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 23 Apr 2019 13:42:21 -0700 Subject: [PATCH 30/40] indexer: cleanup and check pruned and index options --- lib/blockchain/chain.js | 12 ------------ lib/blockchain/chaindb.js | 26 ------------------------- lib/indexer/indexer.js | 1 + lib/node/fullnode.js | 2 ++ test/indexer-test.js | 40 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 43 insertions(+), 38 deletions(-) diff --git 
a/lib/blockchain/chain.js b/lib/blockchain/chain.js index 19cfea935..aa546681f 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -2687,8 +2687,6 @@ class ChainOptions { this.bip91 = false; this.bip148 = false; this.prune = false; - this.indexTX = false; - this.indexAddress = false; this.forceFlags = false; this.entryCache = 5000; @@ -2766,16 +2764,6 @@ class ChainOptions { this.compression = options.compression; } - if (options.indexTX != null) { - assert(typeof options.indexTX === 'boolean'); - this.indexTX = options.indexTX; - } - - if (options.indexAddress != null) { - assert(typeof options.indexAddress === 'boolean'); - this.indexAddress = options.indexAddress; - } - if (options.prune != null) { assert(typeof options.prune === 'boolean'); this.prune = options.prune; diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 184afbf12..24dc6846e 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -571,12 +571,6 @@ class ChainDB { if (!options.prune && flags.prune) throw new Error('Cannot retroactively unprune.'); - if (options.prune && options.indexTX && !flags.indexTX) - throw new Error('Cannot retroactively enable TX indexing.'); - - if (options.prune && options.indexAddress && !flags.indexAddress) - throw new Error('Cannot retroactively enable address indexing.'); - if (needsSave) { await this.logger.info('Rewriting chain flags.'); await this.saveFlags(); @@ -1801,8 +1795,6 @@ class ChainFlags { this.bip91 = false; this.bip148 = false; this.prune = false; - this.indexTX = false; - this.indexAddress = false; if (options) this.fromOptions(options); @@ -1831,16 +1823,6 @@ class ChainFlags { this.prune = options.prune; } - if (options.indexTX != null) { - assert(typeof options.indexTX === 'boolean'); - this.indexTX = options.indexTX; - } - - if (options.indexAddress != null) { - assert(typeof options.indexAddress === 'boolean'); - this.indexAddress = options.indexAddress; - } - return this; } @@ -1862,12 
+1844,6 @@ class ChainFlags { if (this.prune) flags |= 1 << 2; - if (this.indexTX) - flags |= 1 << 3; - - if (this.indexAddress) - flags |= 1 << 4; - if (this.bip91) flags |= 1 << 5; @@ -1891,8 +1867,6 @@ class ChainFlags { this.spv = (flags & 1) !== 0; this.witness = (flags & 2) !== 0; this.prune = (flags & 4) !== 0; - this.indexTX = (flags & 8) !== 0; - this.indexAddress = (flags & 16) !== 0; this.bip91 = (flags & 32) !== 0; this.bip148 = (flags & 64) !== 0; diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 5a6b51f46..111877719 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -612,6 +612,7 @@ class IndexOptions { 'Indexer requires a blockstore.'); assert(options.chain && typeof options.chain === 'object', 'Indexer requires chain.'); + assert(!options.prune, 'Can not index while pruned.'); this.blocks = options.blocks; this.chain = options.chain; diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 0a3612fc0..2b77a7c05 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -164,6 +164,7 @@ class FullNode extends Node { logger: this.logger, blocks: this.blocks, chain: this.chain, + prune: this.config.bool('prune'), memory: this.config.bool('memory'), prefix: this.config.filter('index').str('prefix') || this.config.prefix }); @@ -175,6 +176,7 @@ class FullNode extends Node { logger: this.logger, blocks: this.blocks, chain: this.chain, + prune: this.config.bool('prune'), memory: this.config.bool('memory'), prefix: this.config.filter('index').str('prefix') || this.config.prefix, maxTxs: this.config.uint('max-txs') diff --git a/test/indexer-test.js b/test/indexer-test.js index 699c48136..61b9acb5f 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -17,6 +17,7 @@ const TXIndexer = require('../lib/indexer/txindexer'); const AddrIndexer = require('../lib/indexer/addrindexer'); const BlockStore = require('../lib/blockstore/level'); const FullNode = require('../lib/node/fullnode'); +const SPVNode = 
require('../lib/node/spvnode'); const Network = require('../lib/protocol/network'); const network = Network.get('regtest'); const {NodeClient, WalletClient} = require('bclient'); @@ -639,6 +640,45 @@ describe('Indexer', function() { await node.close(); } }); + + it('will not index if pruned', async () => { + let err = null; + + try { + new FullNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + prune: true, + indexTX: true, + indexAddress: true, + port: ports.p2p, + httpPort: ports.node + }); + } catch (e) { + err = e; + } + + assert(err); + assert.equal(err.message, 'Can not index while pruned.'); + }); + + it('will not index if spv', async () => { + const node = new SPVNode({ + prefix: prefix, + network: 'regtest', + apiKey: 'foo', + memory: false, + indexTX: true, + indexAddress: true, + port: ports.p2p, + httpPort: ports.node + }); + + assert.equal(node.txindex, undefined); + assert.equal(node.addrindex, undefined); + }); }); }); From ed06c2184ddb18836664c0119e139b0643376269 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 23 Apr 2019 15:55:56 -0700 Subject: [PATCH 31/40] indexer: check that blocks are connected There was a rare case that a block could be incorrectly added to the indexer if the indexer was disabled during a reorg to a height that matched the height that was expected, and the `sync` method for the indexer wasn't called that would detect the reorg. --- lib/indexer/indexer.js | 11 ++++ test/indexer-test.js | 116 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 127 insertions(+) diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 111877719..0628a641e 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -286,9 +286,20 @@ class Indexer extends EventEmitter { // instead of reading that information again. if (meta && block && view) { if (meta.height === this.height + 1) { + // Make sure that the block is connected to + // the indexer chain. 
+ const prev = await this.getBlockMeta(this.height); + if (prev.hash.compare(block.prevBlock) !== 0) + return false; + await this._addBlock(meta, block, view); return true; } else if (meta.height === this.height) { + // Make sure that this is the current block. + const current = await this.getBlockMeta(this.height); + if (current.hash.compare(block.hash()) !== 0) + return false; + await this._removeBlock(meta, block, view); return true; } diff --git a/test/indexer-test.js b/test/indexer-test.js index 61b9acb5f..a05dc8c28 100644 --- a/test/indexer-test.js +++ b/test/indexer-test.js @@ -119,6 +119,122 @@ describe('Indexer', function() { }); describe('Unit', function() { + it('should connect block', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {} + }); + + indexer.height = 9; + + indexer.getBlockMeta = (height) => { + return { + hash: Buffer.alloc(32, 0x00), + height: height + }; + }; + + let called = false; + indexer._addBlock = async () => { + called = true; + }; + + const meta = {height: 10}; + const block = {prevBlock: Buffer.alloc(32, 0x00)}; + const view = {}; + + const connected = await indexer._syncBlock(meta, block, view); + assert.equal(connected, true); + assert.equal(called, true); + }); + + it('should not connect block', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {} + }); + + indexer.height = 9; + + indexer.getBlockMeta = (height) => { + return { + hash: Buffer.alloc(32, 0x02), + height: height + }; + }; + + let called = false; + indexer._addBlock = async () => { + called = true; + }; + + const meta = {height: 10}; + const block = {prevBlock: Buffer.alloc(32, 0x01)}; + const view = {}; + + const connected = await indexer._syncBlock(meta, block, view); + assert.equal(connected, false); + assert.equal(called, false); + }); + + it('should disconnect block', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {} + }); + + indexer.height = 9; + + indexer.getBlockMeta = (height) => { 
+ return { + hash: Buffer.alloc(32, 0x00), + height: height + }; + }; + + let called = false; + indexer._removeBlock = async () => { + called = true; + }; + + const meta = {height: 9}; + const block = {hash: () => Buffer.alloc(32, 0x00)}; + const view = {}; + + const connected = await indexer._syncBlock(meta, block, view); + assert.equal(connected, true); + assert.equal(called, true); + }); + + it('should not disconnect block', async () => { + const indexer = new AddrIndexer({ + blocks: {}, + chain: {} + }); + + indexer.height = 9; + + indexer.getBlockMeta = (height) => { + return { + hash: Buffer.alloc(32, 0x01), + height: height + }; + }; + + let called = false; + indexer._removeBlock = async () => { + called = true; + }; + + const meta = {height: 9}; + const block = {hash: () => Buffer.alloc(32, 0x02)}; + const view = {}; + + const connected = await indexer._syncBlock(meta, block, view); + assert.equal(connected, false); + assert.equal(called, false); + }); + it('should not index transaction w/ invalid address', async () => { const indexer = new AddrIndexer({ blocks: {}, From 5111af06157481e30e4b9c439e558d74ee845c00 Mon Sep 17 00:00:00 2001 From: Javed Khan Date: Thu, 18 Apr 2019 20:33:22 +0530 Subject: [PATCH 32/40] migrate: update latest migration --- migrate/latest | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/migrate/latest b/migrate/latest index 519479b85..dd0e1c138 100755 --- a/migrate/latest +++ b/migrate/latest @@ -1,7 +1,7 @@ #!/usr/bin/env node -const assert = require('assert'); -const fs = require('fs'); +'use strict'; + const cp = require('child_process'); const res = require('path').resolve; const {argv} = process; @@ -13,14 +13,6 @@ if (argv.length < 3) { return; } -function mv(from, to) { - try { - fs.renameSync(from, to); - } catch (e) { - console.error(e.message); - } -} - function exec(file, ...args) { try { const result = cp.spawnSync(file, args, { @@ -39,11 +31,8 @@ function exec(file, ...args) { const 
node = argv[0]; const prefix = argv[2]; -mv(res(prefix, 'chain.ldb'), res(prefix, 'chain')); -mv(res(prefix, 'spvchain.ldb'), res(prefix, 'spvchain')); -mv(res(prefix, 'mempool.ldb'), res(prefix, 'mempool')); -mv(res(prefix, 'walletdb.ldb'), res(prefix, 'wallet')); - exec(node, res(__dirname, 'chaindb3to4.js'), res(prefix, 'chain')); exec(node, res(__dirname, 'chaindb3to4.js'), res(prefix, 'spvchain')); +exec(node, res(__dirname, 'chaindb4to5.js'), res(prefix, 'chain')); +exec(node, res(__dirname, 'chaindb5to6.js'), res(prefix, 'chain')); exec(node, res(__dirname, 'walletdb6to7.js'), res(prefix, 'wallet')); From 677b45f3d5abb5727938bf4d12b3bab1ba2bf83b Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 23 Apr 2019 17:32:56 -0700 Subject: [PATCH 33/40] migrate: upgrade from chaindb 4 to 6 --- migrate/{chaindb4to5.js => chaindb4to6.js} | 106 +++++++++++++++++---- migrate/chaindb5to6.js | 105 -------------------- migrate/latest | 21 ++-- 3 files changed, 100 insertions(+), 132 deletions(-) rename migrate/{chaindb4to5.js => chaindb4to6.js} (54%) delete mode 100644 migrate/chaindb5to6.js diff --git a/migrate/chaindb4to5.js b/migrate/chaindb4to6.js similarity index 54% rename from migrate/chaindb4to5.js rename to migrate/chaindb4to6.js index b9eb1f0ba..0e9e8b36a 100644 --- a/migrate/chaindb4to5.js +++ b/migrate/chaindb4to6.js @@ -8,8 +8,9 @@ const {resolve} = require('path'); assert(process.argv.length > 2, 'Please pass in a database path.'); -// migration - -// chaindb: leveldb to flat files +// Changes: +// 1. Moves blocks and undo blocks from leveldb to flat files. +// 2. Removes tx and addr indexes from chaindb. 
const db = bdb.create({ location: process.argv[2], @@ -25,29 +26,33 @@ const blockStore = new FileBlockStore({ location: location }); -async function updateVersion() { - const ver = await checkVersion(); +async function getVersion() { + const data = await db.get(layout.V.encode()); + assert(data, 'No version.'); + + return data.readUInt32LE(5, true); +} + +async function updateVersion(version) { + await checkVersion(version - 1); - console.log('Updating version to %d.', ver + 1); + console.log('Updating version to %d.', version); const buf = Buffer.allocUnsafe(5 + 4); buf.write('chain', 0, 'ascii'); - buf.writeUInt32LE(5, 5, true); + buf.writeUInt32LE(version, 5, true); const parent = db.batch(); parent.put(layout.V.encode(), buf); await parent.write(); } -async function checkVersion() { +async function checkVersion(version) { console.log('Checking version.'); - const data = await db.get(layout.V.encode()); - assert(data, 'No version.'); - - const ver = data.readUInt32LE(5, true); + const ver = await getVersion(); - if (ver !== 4) + if (ver !== version) throw Error(`DB is version ${ver}.`); return ver; @@ -113,26 +118,85 @@ async function migrateBlocks() { await parent.write(); } +async function removeKey(name, key) { + const iter = db.iterator({ + gte: key.min(), + lte: key.max(), + reverse: true, + keys: true + }); + + let batch = db.batch(); + let total = 0; + + while (await iter.next()) { + const {key} = iter; + batch.del(key); + + if (++total % 10000 === 0) { + console.log('Cleaned up %d %s index records.', total, name); + await batch.write(); + batch = db.batch(); + } + } + await batch.write(); + + console.log('Cleaned up %d %s index records.', total, name); +} + +async function migrateIndexes() { + const t = bdb.key('t', ['hash256']); + const T = bdb.key('T', ['hash', 'hash256']); + const C = bdb.key('C', ['hash', 'hash256', 'uint32']); + + await removeKey('hash -> tx', t); + await removeKey('addr -> tx', T); + await removeKey('addr -> coin', C); +} + /* * 
Execute */ (async () => { await db.open(); - await blockStore.ensure(); - await blockStore.open(); console.log('Opened %s.', process.argv[2]); - await checkVersion(); - await migrateBlocks(); - await migrateUndoBlocks(); - await updateVersion(); + const version = await getVersion(); + let compact = false; + + switch (version) { + case 4: + // Upgrade from version 4 to 5. + await checkVersion(4); + await blockStore.ensure(); + await blockStore.open(); + await migrateBlocks(); + await migrateUndoBlocks(); + await updateVersion(5); + await blockStore.close(); + compact = true; + case 5: + // Upgrade from version 5 to 6. + await checkVersion(5); + await migrateIndexes(); + await updateVersion(6); + compact = true; + break; + case 6: + console.log('Already upgraded.'); + break; + default: + console.log(`DB version is ${version}.`); + } + + if (compact) { + console.log('Compacting database'); + await db.compactRange(); + } - console.log('Compacting database'); - await db.compactRange(); await db.close(); - await blockStore.close(); })().then(() => { console.log('Migration complete.'); process.exit(0); diff --git a/migrate/chaindb5to6.js b/migrate/chaindb5to6.js deleted file mode 100644 index 8a85b607f..000000000 --- a/migrate/chaindb5to6.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; - -const assert = require('assert'); -const bdb = require('bdb'); -const layout = require('../lib/blockchain/layout'); - -// changes: -// removes tx, addr indexes i.e layout.t, layout.T, layout.C - -assert(process.argv.length > 2, 'Please pass in a database path.'); - -const db = bdb.create({ - location: process.argv[2], - memory: false, - compression: true, - cacheSize: 32 << 20, - createIfMissing: false -}); - -async function updateVersion() { - const ver = await checkVersion(); - - console.log('Updating version to %d.', ver + 1); - - const buf = Buffer.allocUnsafe(5 + 4); - buf.write('chain', 0, 'ascii'); - buf.writeUInt32LE(6, 5, true); - - const parent = db.batch(); - 
parent.put(layout.V.encode(), buf); - await parent.write(); -} - -async function checkVersion() { - console.log('Checking version.'); - - const data = await db.get(layout.V.encode()); - assert(data, 'No version.'); - - const ver = data.readUInt32LE(5, true); - - if (ver !== 5) - throw Error(`DB is version ${ver}.`); - - return ver; -} - -async function removeKey(name, key) { - const iter = db.iterator({ - gte: key.min(), - lte: key.max(), - reverse: true, - keys: true - }); - - let batch = db.batch(); - let total = 0; - - while (await iter.next()) { - const {key} = iter; - batch.del(key); - - if (++total % 10000 === 0) { - console.log('Cleaned up %d %s index records.', total, name); - await batch.write(); - batch = db.batch(); - } - } - await batch.write(); - - console.log('Cleaned up %d %s index records.', total, name); -} - -async function migrateIndexes() { - const t = bdb.key('t', ['hash256']); - const T = bdb.key('T', ['hash', 'hash256']); - const C = bdb.key('C', ['hash', 'hash256', 'uint32']); - - await removeKey('hash -> tx', t); - await removeKey('addr -> tx', T); - await removeKey('addr -> coin', C); -} - -/* - * Execute - */ - -(async () => { - await db.open(); - - console.log('Opened %s.', process.argv[2]); - - await checkVersion(); - await migrateIndexes(); - await updateVersion(); - - await db.compactRange(); - await db.close(); -})().then(() => { - console.log('Migration complete.'); - process.exit(0); -}).catch((err) => { - console.error(err.stack); - process.exit(1); -}); diff --git a/migrate/latest b/migrate/latest index dd0e1c138..b4ebd9810 100755 --- a/migrate/latest +++ b/migrate/latest @@ -3,7 +3,8 @@ 'use strict'; const cp = require('child_process'); -const res = require('path').resolve; +const fs = require('bfile'); +const {resolve} = require('path'); const {argv} = process; if (argv.length < 3) { @@ -31,8 +32,16 @@ function exec(file, ...args) { const node = argv[0]; const prefix = argv[2]; -exec(node, res(__dirname, 'chaindb3to4.js'), 
res(prefix, 'chain')); -exec(node, res(__dirname, 'chaindb3to4.js'), res(prefix, 'spvchain')); -exec(node, res(__dirname, 'chaindb4to5.js'), res(prefix, 'chain')); -exec(node, res(__dirname, 'chaindb5to6.js'), res(prefix, 'chain')); -exec(node, res(__dirname, 'walletdb6to7.js'), res(prefix, 'wallet')); +const chain = resolve(prefix, 'chain'); +const spvchain = resolve(prefix, 'spvchain'); + +(async () => { + if (await fs.exists(chain)) + exec(node, resolve(__dirname, 'chaindb4to6.js'), chain); + + if (await fs.exists(spvchain)) + exec(node, resolve(__dirname, 'chaindb4to6.js'), spvchain); +})().catch((err) => { + console.error(err.stack); + process.exit(1); +}); From a6fd1bec1ad448d3a86a938f8b217526da8ee6a1 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Tue, 23 Apr 2019 17:47:50 -0700 Subject: [PATCH 34/40] changelog: update with latest migration --- CHANGELOG.md | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e22ca61d4..6afd1c1f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,14 +16,17 @@ efficiency, reliability and portability. block download. - The `addrindex` has been sorted to support querying for large sets of results, and will no longer cause CPU and memory exhaustion issues. +- The `addrindex` will correctly distinguish between `p2pkh` and + `p2wpkh` addresses. To upgrade to the new disk layout it's necessary to move block data from LevelDB (e.g. `~/.bcoin/chain`) to a new file based block -storage (e.g. `~./.bcoin/blocks`). +storage (e.g. `~./.bcoin/blocks`), and remove `txindex` and `addrindex` +data from the chain database, for those that have that feature enabled. To do this you can run: ``` -node ./migrate/chaindb4to5.js /path/to/bcoin/chain +node ./migrate/chaindb4to6.js /path/to/bcoin/chain ``` The migration will take 1-3 hours, depending on hardware. 
The block data @@ -34,16 +37,10 @@ Alternatively, you can also sync the chain again, however the above migration will be faster as additional network bandwidth won't be used for downloading the blocks again. -For those with `txindex` and `addrindex` enabled there is an additional -step to cleanup and regenerate the indexes. - -``` bash -$ ./migrate/chaindb5to6.js /path/to/bcoin/chain -``` - -The indexes will be regenerated by rescanning the chain on next startup, -this process might take a while. Please take the potential downtime in -re-indexing into account before upgrading. +For those with `txindex` and `addrindex` enabled, the indexes will be +regenerated by rescanning the chain on next startup, this process might +take a while. Please take the potential downtime in re-indexing into +account before upgrading. ### Wallet API changes From ce16bc92391bf18db31c7bc5b4e0eb2babf1d5be Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 15 May 2019 10:56:54 -0700 Subject: [PATCH 35/40] node: simplify index-prefix option --- lib/node/fullnode.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 2b77a7c05..0dff21d3c 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -166,7 +166,7 @@ class FullNode extends Node { chain: this.chain, prune: this.config.bool('prune'), memory: this.config.bool('memory'), - prefix: this.config.filter('index').str('prefix') || this.config.prefix + prefix: this.config.str('index-prefix', this.config.prefix) }); } @@ -178,7 +178,7 @@ class FullNode extends Node { chain: this.chain, prune: this.config.bool('prune'), memory: this.config.bool('memory'), - prefix: this.config.filter('index').str('prefix') || this.config.prefix, + prefix: this.config.str('index-prefix', this.config.prefix), maxTxs: this.config.uint('max-txs') }); } From 372b9f980364c88442a118e7d2361c307ebc33c6 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 15 May 2019 11:01:13 -0700 
Subject: [PATCH 36/40] changelog: update with new indexer fields for info endpoint --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6afd1c1f0..1556f3e18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -93,6 +93,7 @@ that have many transactions. Coins could otherwise be removed from results at any point, and thus the entire set of results would need to be queried every time to discover which coins have been spent and are currently available. +- `GET /` has new fields `.indexes.{addr,tx}` for the status of indexers. ### Network changes From 662a68cf101889a7868c6878b5d30074ea7393ed Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Wed, 15 May 2019 11:10:31 -0700 Subject: [PATCH 37/40] blockchain: cleanup deprecated methods Deprecation details are in CHANGELOG.md --- lib/blockchain/chain.js | 84 --------------------------- lib/blockchain/chaindb.js | 118 -------------------------------------- 2 files changed, 202 deletions(-) diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js index aa546681f..10adf74b7 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -1948,75 +1948,6 @@ class Chain extends AsyncEmitter { return this.db.getBlockView(block); } - /** - * Get a transaction with metadata. - * @param {Hash} hash - * @returns {Promise} - Returns {@link TXMeta}. - */ - - getMeta(hash) { - return this.db.getMeta(hash); - } - - /** - * Retrieve a transaction. - * @param {Hash} hash - * @returns {Promise} - Returns {@link TX}. - */ - - getTX(hash) { - return this.db.getTX(hash); - } - - /** - * @param {Hash} hash - * @returns {Promise} - Returns Boolean. - */ - - hasTX(hash) { - return this.db.hasTX(hash); - } - - /** - * Get all coins pertinent to an address. - * @param {Address[]} addrs - * @returns {Promise} - Returns {@link Coin}[]. - */ - - getCoinsByAddress(addrs) { - return this.db.getCoinsByAddress(addrs); - } - - /** - * Get all transaction hashes to an address. 
- * @param {Address[]} addrs - * @returns {Promise} - Returns {@link Hash}[]. - */ - - getHashesByAddress(addrs) { - return this.db.getHashesByAddress(addrs); - } - - /** - * Get all transactions pertinent to an address. - * @param {Address[]} addrs - * @returns {Promise} - Returns {@link TX}[]. - */ - - getTXByAddress(addrs) { - return this.db.getTXByAddress(addrs); - } - - /** - * Get all transactions pertinent to an address. - * @param {Address[]} addrs - * @returns {Promise} - Returns {@link TXMeta}[]. - */ - - getMetaByAddress(addrs) { - return this.db.getMetaByAddress(addrs); - } - /** * Get an orphan block. * @param {Hash} hash @@ -2057,21 +1988,6 @@ class Chain extends AsyncEmitter { return this.db.getCoinView(tx); } - /** - * Get coin viewpoint (spent). - * @param {TXMeta} meta - * @returns {Promise} - Returns {@link CoinView}. - */ - - async getSpentView(meta) { - const unlock = await this.locker.lock(); - try { - return await this.db.getSpentView(meta); - } finally { - unlock(); - } - } - /** * Test the chain to see if it is synced. * @returns {Boolean} diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 24dc6846e..1d12af779 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -962,20 +962,6 @@ class ChainDB { return view; } - /** - * Get coin viewpoint (historical). - * @param {TXMeta} meta - * @returns {Promise} - Returns {@link CoinView}. - */ - - async getSpentView(meta) { - process.emitWarning( - 'deprecated, use node.txindex.getSpentView', - 'DeprecationWarning' - ); - return null; - } - /** * Get coins necessary to be resurrected during a reorg. * @param {Hash} hash @@ -1052,110 +1038,6 @@ class ChainDB { return view; } - /** - * Get a transaction with metadata. - * @param {Hash} hash - * @deprecated - * @returns {Promise} - Returns {@link TXMeta}. 
- */ - - async getMeta(hash) { - process.emitWarning( - 'deprecated, use node.txindex.getMeta', - 'DeprecationWarning' - ); - return null; - } - - /** - * Retrieve a transaction. - * @param {Hash} hash - * @deprecated - * @returns {Promise} - Returns {@link TX}. - */ - - async getTX(hash) { - process.emitWarning( - 'deprecated, use node.txindex.getTX', - 'DeprecationWarning' - ); - return null; - } - - /** - * @param {Hash} hash - * @deprecated - * @returns {Promise} - Returns Boolean. - */ - - async hasTX(hash) { - process.emitWarning( - 'deprecated, use node.txindex.hasTX', - 'DeprecationWarning' - ); - return false; - } - - /** - * Get all coins pertinent to an address. - * @param {Address[]} addrs - * @deprecated - * @returns {Promise} - Returns {@link Coin}[]. - */ - - async getCoinsByAddress(addrs) { - process.emitWarning( - 'deprecated, use node.addrindex.getCoinsByAddress', - 'DeprecationWarning' - ); - return []; - } - - /** - * Get all transaction hashes to an address. - * @param {Address[]} addrs - * @deprecated - * @returns {Promise} - Returns {@link Hash}[]. - */ - - async getHashesByAddress(addrs) { - process.emitWarning( - 'deprecated, use node.addrindex.getHashesByAddress', - 'DeprecationWarning' - ); - return []; - } - - /** - * Get all transactions pertinent to an address. - * @param {Address[]} addrs - * @deprecated - * @returns {Promise} - Returns {@link TX}[]. - */ - - async getTXByAddress(addrs) { - process.emitWarning( - 'deprecated, use node.addrindex.getHashesByAddress', - 'DeprecationWarning' - ); - return []; - } - - /** - * Get all transactions pertinent to an address. - * @param {Address[]} addrs - * @deprecated - * @returns {Promise} - Returns {@link TXMeta}[]. - */ - - async getMetaByAddress(addrs) { - process.emitWarning( - 'deprecated, use node.addrindex.getMetaByAddress', - 'DeprecationWarning' - ); - return []; - } - /** * Scan the blockchain for transactions containing specified address hashes. 
* @param {Hash} start - Block hash to start at. From 34e8f22b9b3845f792feeea0183e17470616c5d5 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 16 May 2019 10:25:58 -0700 Subject: [PATCH 38/40] indexer: update and fix documentation --- lib/indexer/addrindexer.js | 2 +- lib/indexer/indexer.js | 11 ++++++----- lib/indexer/txindexer.js | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/indexer/addrindexer.js b/lib/indexer/addrindexer.js index cdcd78dd5..ce2478ddb 100644 --- a/lib/indexer/addrindexer.js +++ b/lib/indexer/addrindexer.js @@ -1,5 +1,5 @@ /*! - * addrindexer.js - addr indexer + * addrindexer.js - address indexer for bcoin * Copyright (c) 2018, the bcoin developers (MIT License). * https://github.com/bcoin-org/bcoin */ diff --git a/lib/indexer/indexer.js b/lib/indexer/indexer.js index 0628a641e..33f45de9b 100644 --- a/lib/indexer/indexer.js +++ b/lib/indexer/indexer.js @@ -1,5 +1,5 @@ /*! - * indexer.js - storage for indexes + * indexer.js - abstract interface for bcoin indexers * Copyright (c) 2018, the bcoin developers (MIT License). * https://github.com/bcoin-org/bcoin */ @@ -21,11 +21,12 @@ const {ZERO_HASH} = require('../protocol/consensus'); /** * Indexer + * The class which indexers inherit from and implement the + * `indexBlock` and `unindexBlock` methods and database + * and storage initialization for indexing blocks. * @alias module:indexer.Indexer * @extends EventEmitter - * @property {IndexerDB} db - * @property {Number} height - * @emits Indexer#chain tip + * @abstract */ class Indexer extends EventEmitter { @@ -121,7 +122,7 @@ class Indexer extends EventEmitter { } /** - * Close the indexdb, wait for the database to close. + * Close the indexer, wait for the database to close. * @returns {Promise} */ diff --git a/lib/indexer/txindexer.js b/lib/indexer/txindexer.js index 64a5f3acf..ff8c55682 100644 --- a/lib/indexer/txindexer.js +++ b/lib/indexer/txindexer.js @@ -1,5 +1,5 @@ /*! 
- * txindexer.js - tx indexer + * txindexer.js - transaction indexer for bcoin * Copyright (c) 2018, the bcoin developers (MIT License). * https://github.com/bcoin-org/bcoin */ From 7e9d00a6655fad20ff9ba7eadff129bfdfdbaa49 Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 16 May 2019 10:26:54 -0700 Subject: [PATCH 39/40] node: pass options from `getTXByAddress` to `getMetaByAddress` --- lib/node/fullnode.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/node/fullnode.js b/lib/node/fullnode.js index 0dff21d3c..1c0642b90 100644 --- a/lib/node/fullnode.js +++ b/lib/node/fullnode.js @@ -556,11 +556,15 @@ class FullNode extends Node { * Retrieve transactions pertaining to an * address from the mempool or chain database. * @param {Address} addr + * @param {Object} options + * @param {Number} options.limit + * @param {Number} options.reverse + * @param {Buffer} options.after * @returns {Promise} - Returns {@link TX}[]. */ - async getTXByAddress(addr) { - const mtxs = await this.getMetaByAddress(addr); + async getTXByAddress(addr, options = {}) { + const mtxs = await this.getMetaByAddress(addr, options); const out = []; for (const mtx of mtxs) From 6c497d46f1bd13ffd7a8de47f68a61ba4f5db6cb Mon Sep 17 00:00:00 2001 From: Braydon Fuller Date: Thu, 16 May 2019 10:28:09 -0700 Subject: [PATCH 40/40] changelog: update and fix chain changes --- CHANGELOG.md | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1556f3e18..1694cfbd2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,9 +38,10 @@ migration will be faster as additional network bandwidth won't be used for downloading the blocks again. For those with `txindex` and `addrindex` enabled, the indexes will be -regenerated by rescanning the chain on next startup, this process might -take a while. Please take the potential downtime in re-indexing into -account before upgrading. 
+regenerated by rescanning the chain on next startup, this process can +take multiple hours (e.g. 8 hours) depending on hardware and the +index. Please take the potential downtime in re-indexing into account +before upgrading. ### Wallet API changes @@ -147,20 +148,29 @@ that have many transactions. ### Chain changes -- The method `getSpentView` accepts a `TXMeta` instead of `TX`. - The transaction index methods are now implemented at `node.txindex`: - `getMeta(hash)` - `getTX(hash)` - `hasTX(hash)` - `getSpentView(tx)` -- The address index method is now implemented at `node.addrindex`: - - `getHashesByAddress(addrs)` -- The following methods require `getHashesByAddress` in conjunction with - `node.txindex.getTX` and `node.txindex.getMeta` respectively. - - `getTXByAddress(addrs)` - - `getMetaByAddress(addrs)` +- The address index method `getHashesByAddress` is now implemented + at `node.addrindex`: + - `getHashesByAddress(addr)` It now accepts `Address` instances + rather than `Address|String` and the results are now sorted in + order of appearance in the blockchain. + - `getHashesByAddress(addr, options)` A new options argument has + been added with the fields: + - `after` - A transaction hash for results to begin after. + - `limit` - The total number of results to return at maximum. + - `reverse` - Will give results in order of latest to oldest. +- The following methods require `node.addrindex.getHashesByAddress` + in conjunction with `node.txindex.getTX` and `node.txindex.getMeta` + respectively, and now includes a new options argument as described + above for `getHashesByAddress`: + - `node.getMetaByAddress(addr, options)` + - `node.getTXByAddress(addr, options)` - The following method has been deprecated: - - `getCoinsByAddress(addrs)` + - `getCoinsByAddress(addr)` ### Other changes