diff --git a/packages/api/src/beacon/server/beacon.ts b/packages/api/src/beacon/server/beacon.ts
index c71decd1ac8e..cd1ed72fb586 100644
--- a/packages/api/src/beacon/server/beacon.ts
+++ b/packages/api/src/beacon/server/beacon.ts
@@ -22,8 +22,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerR
       handler: async (req) => {
         const response = await api.getBlock(...reqSerializers.getBlock.parseReq(req));
         if (response instanceof Uint8Array) {
-          // Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
-          return Buffer.from(response);
+          // Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
+          return response;
         } else {
           return returnTypes.getBlock.toJson(response);
         }
@@ -37,8 +37,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerR
           const slot = extractSlotFromBlockBytes(response);
           const version = config.getForkName(slot);
           void res.header("Eth-Consensus-Version", version);
-          // Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
-          return Buffer.from(response);
+          // Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
+          return response;
         } else {
           void res.header("Eth-Consensus-Version", response.version);
           return returnTypes.getBlockV2.toJson(response);
diff --git a/packages/api/src/beacon/server/debug.ts b/packages/api/src/beacon/server/debug.ts
index 34edf91a5809..553e08afddd0 100644
--- a/packages/api/src/beacon/server/debug.ts
+++ b/packages/api/src/beacon/server/debug.ts
@@ -23,8 +23,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerR
       handler: async (req) => {
         const response = await api.getState(...reqSerializers.getState.parseReq(req));
         if (response instanceof Uint8Array) {
-          // Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
-          return Buffer.from(response);
+          // Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
+          return response;
         } else {
           return returnTypes.getState.toJson(response);
         }
@@ -38,8 +38,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerR
           const slot = extractSlotFromStateBytes(response);
           const version = config.getForkName(slot);
           void res.header("Eth-Consensus-Version", version);
-          // Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
-          return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
+          // Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
+          return response;
         } else {
           void res.header("Eth-Consensus-Version", response.version);
           return returnTypes.getStateV2.toJson(response);
diff --git a/packages/api/src/beacon/server/proof.ts b/packages/api/src/beacon/server/proof.ts
index a03ae472bf78..bc865626ba72 100644
--- a/packages/api/src/beacon/server/proof.ts
+++ b/packages/api/src/beacon/server/proof.ts
@@ -24,8 +24,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerR
         for (let i = 0; i < leaves.length; i++) {
           response.set(leaves[i], i * 32);
         }
-        // Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
-        return Buffer.from(response);
+        // Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
+        return response;
       },
     },
     getBlockProof: {
@@ -38,8 +38,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerR
         for (let i = 0; i < leaves.length; i++) {
           response.set(leaves[i], i * 32);
         }
-        // Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
-        return Buffer.from(response);
+        // Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
+        return response;
       },
     },
   };
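
The updated comments above state that Fastify 4.x serializes a returned TypedArray with `Content-Type: application/octet-stream`, so the serialized payload can be returned as-is instead of being copied into a fresh Buffer first. A minimal standalone sketch of the copy-vs-view distinction (the `payload` value is hypothetical, not taken from this diff):

  import {Buffer} from "node:buffer";

  // e.g. an SSZ-serialized block held as a plain Uint8Array
  const payload: Uint8Array = new Uint8Array(1024);

  // Buffer.from(TypedArray) allocates new memory and copies every byte
  const copied = Buffer.from(payload);
  // Buffer.from(ArrayBuffer, offset, length) only wraps the existing memory
  const wrapped = Buffer.from(payload.buffer, payload.byteOffset, payload.byteLength);

  payload[0] = 0xff;
  console.log(copied[0], wrapped[0]); // 0 255: the copy is detached, the wrapper is a view
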
diff --git a/packages/beacon-node/src/network/gossip/encoding.ts b/packages/beacon-node/src/network/gossip/encoding.ts
index bcb6d9181bc8..3b87f42747ab 100644
--- a/packages/beacon-node/src/network/gossip/encoding.ts
+++ b/packages/beacon-node/src/network/gossip/encoding.ts
@@ -58,7 +58,7 @@ export function msgIdFn(gossipTopicCache: GossipTopicCache, msg: Message): Uint8
     vec = [MESSAGE_DOMAIN_VALID_SNAPPY, intToBytes(msg.topic.length, 8), Buffer.from(msg.topic), msg.data];
   }

-  return Buffer.from(digest(Buffer.concat(vec))).subarray(0, 20);
+  return digest(Buffer.concat(vec)).subarray(0, 20);
 }

 export class DataTransformSnappy {
diff --git a/packages/beacon-node/src/util/sszBytes.ts b/packages/beacon-node/src/util/sszBytes.ts
index cd12c4bd9c18..802b9a266ab1 100644
--- a/packages/beacon-node/src/util/sszBytes.ts
+++ b/packages/beacon-node/src/util/sszBytes.ts
@@ -59,9 +59,7 @@ export function getAttDataBase64FromAttestationSerialized(data: Uint8Array): Att
   }

   // base64 is a bit efficient than hex
-  return Buffer.from(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)).toString(
-    "base64"
-  );
+  return toBase64(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE));
 }

 /**
@@ -150,9 +148,9 @@ export function getAttDataBase64FromSignedAggregateAndProofSerialized(data: Uint
   }

   // base64 is a bit efficient than hex
-  return Buffer.from(
+  return toBase64(
     data.slice(SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET, SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE)
-  ).toString("base64");
+  );
 }

 /**
@@ -206,3 +204,7 @@ function getSlotFromOffset(data: Uint8Array, offset: number): Slot {
   // Read only the first 4 bytes of Slot, max value is 4,294,967,295 will be reached 1634 years after genesis
   return dv.getUint32(offset, true);
 }
+
+function toBase64(data: Uint8Array): string {
+  return Buffer.from(data.buffer, data.byteOffset, data.byteLength).toString("base64");
+}
diff --git a/packages/beacon-node/test/perf/util/bytes.test.ts b/packages/beacon-node/test/perf/util/bytes.test.ts
index bc3f6cb72e0d..d1d2e968b181 100644
--- a/packages/beacon-node/test/perf/util/bytes.test.ts
+++ b/packages/beacon-node/test/perf/util/bytes.test.ts
@@ -34,4 +34,40 @@ describe("bytes utils", function () {
       }
     },
   });
+
+  itBench({
+    id: "Buffer.copy",
+    fn: () => {
+      const arr = Buffer.alloc(32 * count);
+      let offset = 0;
+      for (const b of buffers) {
+        b.copy(arr, offset, 0, b.length);
+        offset += b.length;
+      }
+    },
+  });
+
+  itBench({
+    id: "Uint8Array.set - with subarray",
+    fn: () => {
+      const arr = new Uint8Array(32 * count);
+      let offset = 0;
+      for (const b of roots) {
+        arr.set(b.subarray(0, b.length), offset);
+        offset += b.length;
+      }
+    },
+  });
+
+  itBench({
+    id: "Uint8Array.set - without subarray",
+    fn: () => {
+      const arr = new Uint8Array(32 * count);
+      let offset = 0;
+      for (const b of roots) {
+        arr.set(b, offset);
+        offset += b.length;
+      }
+    },
+  });
 });
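
The new `toBase64` helper and the added benchmarks both target the same cost: `Buffer.from(TypedArray)` and `Uint8Array#slice` allocate copies, while a view over the same memory does not. A small standalone sketch of the `slice`/`subarray` difference being measured (toy data, not from this diff):

  import {Buffer} from "node:buffer";

  const data = Uint8Array.from([1, 2, 3, 4, 5, 6, 7, 8]);
  const view = data.subarray(2, 6); // shares memory with `data`, no allocation
  const copy = data.slice(2, 6); // fresh 4-byte allocation

  data[2] = 99;
  console.log(view[0], copy[0]); // 99 3

  // Zero-copy base64 of a view, mirroring the toBase64() helper added above
  console.log(Buffer.from(view.buffer, view.byteOffset, view.byteLength).toString("base64"));
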
diff --git a/packages/reqresp/src/encoders/responseEncode.ts b/packages/reqresp/src/encoders/responseEncode.ts
index 3e2d875685e8..c5320ffc1ce9 100644
--- a/packages/reqresp/src/encoders/responseEncode.ts
+++ b/packages/reqresp/src/encoders/responseEncode.ts
@@ -3,6 +3,8 @@ import {encodeErrorMessage} from "../utils/index.js";
 import {ContextBytesType, ContextBytesFactory, MixedProtocol, Protocol, ResponseOutgoing} from "../types.js";
 import {RespStatus, RpcResponseStatusError} from "../interface.js";

+const SUCCESS_BUFFER = Buffer.from([RespStatus.SUCCESS]);
+
 /**
  * Yields byte chunks for a `` with a zero response code ``
  * ```bnf
  *
@@ -24,7 +26,7 @@ export function responseEncodeSuccess(
     cbs.onChunk(chunkIndex++);

     //
-    yield Buffer.from([RespStatus.SUCCESS]);
+    yield SUCCESS_BUFFER;

     // - from altair
     const contextBytes = getContextBytes(protocol.contextBytes, chunk);
diff --git a/packages/reqresp/src/encodingStrategies/sszSnappy/encode.ts b/packages/reqresp/src/encodingStrategies/sszSnappy/encode.ts
index 930aa242766d..f4559b91f3fe 100644
--- a/packages/reqresp/src/encodingStrategies/sszSnappy/encode.ts
+++ b/packages/reqresp/src/encodingStrategies/sszSnappy/encode.ts
@@ -15,7 +15,8 @@ export const writeSszSnappyPayload = encodeSszSnappy as (bytes: Uint8Array) => A
  */
 export async function* encodeSszSnappy(bytes: Buffer): AsyncGenerator {
   // MUST encode the length of the raw SSZ bytes, encoded as an unsigned protobuf varint
-  yield Buffer.from(varintEncode(bytes.length));
+  const varint = varintEncode(bytes.length);
+  yield Buffer.from(varint.buffer, varint.byteOffset, varint.byteLength);

   // By first computing and writing the SSZ byte length, the SSZ encoder can then directly
   // write the chunk contents to the stream. Snappy writer compresses frame by frame
diff --git a/packages/reqresp/src/utils/errorMessage.ts b/packages/reqresp/src/utils/errorMessage.ts
index c9ca6505d282..3af32a8f5c5c 100644
--- a/packages/reqresp/src/utils/errorMessage.ts
+++ b/packages/reqresp/src/utils/errorMessage.ts
@@ -1,4 +1,4 @@
-import {encodeSszSnappy} from "../encodingStrategies/sszSnappy/encode.js";
+import {writeSszSnappyPayload} from "../encodingStrategies/sszSnappy/encode.js";
 import {Encoding} from "../types.js";

 // ErrorMessage schema:
@@ -17,11 +17,11 @@ import {Encoding} from "../types.js";
  */
 export async function* encodeErrorMessage(errorMessage: string, encoding: Encoding): AsyncGenerator {
   const encoder = new TextEncoder();
-  const bytes = Buffer.from(encoder.encode(errorMessage).slice(0, 256));
+  const bytes = encoder.encode(errorMessage).slice(0, 256);

   switch (encoding) {
     case Encoding.SSZ_SNAPPY:
-      yield* encodeSszSnappy(bytes);
+      yield* writeSszSnappyPayload(bytes);
   }
 }
diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json
index 0d68d64f3451..4895c86be364 100644
--- a/packages/state-transition/package.json
+++ b/packages/state-transition/package.json
@@ -68,11 +68,7 @@
     "@lodestar/params": "^1.18.1",
     "@lodestar/types": "^1.18.1",
     "@lodestar/utils": "^1.18.1",
-    "bigint-buffer": "^1.1.5",
-    "buffer-xor": "^2.0.2"
-  },
-  "devDependencies": {
-    "@types/buffer-xor": "^2.0.0"
+    "bigint-buffer": "^1.1.5"
   },
   "keywords": [
     "ethereum",
diff --git a/packages/state-transition/src/block/processRandao.ts b/packages/state-transition/src/block/processRandao.ts
index cf9ea193b944..effeb34390cb 100644
--- a/packages/state-transition/src/block/processRandao.ts
+++ b/packages/state-transition/src/block/processRandao.ts
@@ -1,4 +1,3 @@
-import xor from "buffer-xor";
 import {digest} from "@chainsafe/as-sha256";
 import {allForks} from "@lodestar/types";
 import {EPOCHS_PER_HISTORICAL_VECTOR} from "@lodestar/params";
@@ -28,6 +27,14 @@ export function processRandao(
   }

   // mix in RANDAO reveal
-  const randaoMix = xor(Buffer.from(getRandaoMix(state, epoch)), Buffer.from(digest(randaoReveal)));
+  const randaoMix = xor(getRandaoMix(state, epoch), digest(randaoReveal));
   state.randaoMixes.set(epoch % EPOCHS_PER_HISTORICAL_VECTOR, randaoMix);
 }
+
+function xor(a: Uint8Array, b: Uint8Array): Uint8Array {
+  const length = Math.min(a.length, b.length);
+  for (let i = 0; i < length; i++) {
+    a[i] = a[i] ^ b[i];
+  }
+  return a;
+}
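
The local `xor` helper replaces the `buffer-xor` dependency and writes the result into its first argument, which saves an allocation but is only safe when the caller can treat that array as scratch space. A hypothetical non-mutating variant for comparison (not part of this diff):

  // Allocating variant: leaves both inputs untouched at the cost of one extra array
  function xorInto(out: Uint8Array, a: Uint8Array, b: Uint8Array): Uint8Array {
    const length = Math.min(a.length, b.length);
    for (let i = 0; i < length; i++) {
      out[i] = a[i] ^ b[i];
    }
    return out;
  }

  const mix = xorInto(new Uint8Array(32), new Uint8Array(32).fill(1), new Uint8Array(32).fill(3));
  console.log(mix[0]); // 2, i.e. 1 ^ 3
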
diff --git a/packages/state-transition/src/cache/pubkeyCache.ts b/packages/state-transition/src/cache/pubkeyCache.ts
index 64a3f4f23c8f..fdc343ff4ec7 100644
--- a/packages/state-transition/src/cache/pubkeyCache.ts
+++ b/packages/state-transition/src/cache/pubkeyCache.ts
@@ -23,7 +23,7 @@ function toMemoryEfficientHexStr(hex: Uint8Array | string): string {
     return hex;
   }

-  return Buffer.from(hex).toString("hex");
+  return Buffer.from(hex.buffer, hex.byteOffset, hex.byteLength).toString("hex");
 }

 export class PubkeyIndexMap {
diff --git a/packages/state-transition/src/util/shuffle.ts b/packages/state-transition/src/util/shuffle.ts
index 07457c5975d0..39137bca69d0 100644
--- a/packages/state-transition/src/util/shuffle.ts
+++ b/packages/state-transition/src/util/shuffle.ts
@@ -90,6 +90,8 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void
   const listSize = input.length >>> 0;
   // check if list size fits in uint32
   assert.equal(listSize, input.length, "input length does not fit uint32");
+  // check that the seed is 32 bytes
+  assert.lte(seed.length, _SHUFFLE_H_SEED_SIZE, `seed length is not lte ${_SHUFFLE_H_SEED_SIZE} bytes`);

   const buf = Buffer.alloc(_SHUFFLE_H_TOTAL_SIZE);
   let r = 0;
@@ -100,8 +102,7 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void
   }

   // Seed is always the first 32 bytes of the hash input, we never have to change this part of the buffer.
-  const _seed = seed;
-  Buffer.from(_seed).copy(buf, 0, 0, _SHUFFLE_H_SEED_SIZE);
+  buf.set(seed, 0);

   // initial values here are not used: overwritten first within the inner for loop.
   let source = seed; // just setting it to a Bytes32
@@ -114,8 +115,8 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void
     buf[_SHUFFLE_H_SEED_SIZE] = r;
     // Seed is already in place, now just hash the correct part of the buffer, and take a uint64 from it,
     // and modulo it to get a pivot within range.
-    const h = digest(buf.slice(0, _SHUFFLE_H_PIVOT_VIEW_SIZE));
-    const pivot = Number(bytesToBigInt(h.slice(0, 8)) % BigInt(listSize)) >>> 0;
+    const h = digest(buf.subarray(0, _SHUFFLE_H_PIVOT_VIEW_SIZE));
+    const pivot = Number(bytesToBigInt(h.subarray(0, 8)) % BigInt(listSize)) >>> 0;

     // Split up the for-loop in two:
     // 1. Handle the part from 0 (incl) to pivot (incl). This is mirrored around (pivot / 2)
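
Two mechanisms change in the shuffle hunks above: `Buffer.from(seed).copy(...)` becomes a direct `buf.set(seed, 0)`, and `slice` becomes `subarray` around the hash calls. A standalone sketch with toy values (not from this diff):

  import {Buffer} from "node:buffer";

  const buf = Buffer.alloc(34);
  const seed = new Uint8Array(32).fill(7);

  // Old pattern: wrap seed in a Buffer (one extra copy) just to reach .copy()
  Buffer.from(seed).copy(buf, 0, 0, 32);
  // New pattern: TypedArray#set copies the source bytes straight into `buf`
  buf.set(seed, 0);

  // Caveat: Buffer#slice is already a non-copying view (unlike Uint8Array#slice),
  // so slice -> subarray on a Buffer changes clarity rather than behavior;
  // on the Uint8Array returned by the digest it does remove a copy.
  console.log(buf.subarray(0, 4)); // <Buffer 07 07 07 07>
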
diff --git a/packages/utils/src/verifyMerkleBranch.ts b/packages/utils/src/verifyMerkleBranch.ts
index e109813dd29e..7bb090da06e4 100644
--- a/packages/utils/src/verifyMerkleBranch.ts
+++ b/packages/utils/src/verifyMerkleBranch.ts
@@ -23,5 +23,5 @@ export function verifyMerkleBranch(
       value = digest64(Buffer.concat([value, proof[i]]));
     }
   }
-  return Buffer.from(value).equals(root);
+  return Buffer.from(value.buffer, value.byteOffset, value.byteLength).equals(root);
 }
diff --git a/packages/validator/src/repositories/metaDataRepository.ts b/packages/validator/src/repositories/metaDataRepository.ts
index c7824094741d..0ba1565a6651 100644
--- a/packages/validator/src/repositories/metaDataRepository.ts
+++ b/packages/validator/src/repositories/metaDataRepository.ts
@@ -25,7 +25,7 @@ export class MetaDataRepository {
   }

   async setGenesisValidatorsRoot(genesisValidatorsRoot: Root): Promise {
-    await this.db.put(this.encodeKey(GENESIS_VALIDATORS_ROOT), Buffer.from(genesisValidatorsRoot), this.dbReqOpts);
+    await this.db.put(this.encodeKey(GENESIS_VALIDATORS_ROOT), genesisValidatorsRoot, this.dbReqOpts);
   }

   async getGenesisTime(): Promise {
@@ -34,7 +34,7 @@ export class MetaDataRepository {
   }

   async setGenesisTime(genesisTime: UintNum64): Promise {
-    await this.db.put(this.encodeKey(GENESIS_TIME), Buffer.from(ssz.UintNum64.serialize(genesisTime)), this.dbReqOpts);
+    await this.db.put(this.encodeKey(GENESIS_TIME), ssz.UintNum64.serialize(genesisTime), this.dbReqOpts);
   }

   private encodeKey(key: Uint8Array): Uint8Array {
diff --git a/packages/validator/src/slashingProtection/attestation/attestationByTargetRepository.ts b/packages/validator/src/slashingProtection/attestation/attestationByTargetRepository.ts
index 7606d31338c4..61c13bb17ce2 100644
--- a/packages/validator/src/slashingProtection/attestation/attestationByTargetRepository.ts
+++ b/packages/validator/src/slashingProtection/attestation/attestationByTargetRepository.ts
@@ -50,7 +50,7 @@ export class AttestationByTargetRepository {
     await this.db.batchPut(
       atts.map((att) => ({
         key: this.encodeKey(pubkey, att.targetEpoch),
-        value: Buffer.from(this.type.serialize(att)),
+        value: this.type.serialize(att),
       })),
       this.dbReqOpts
     );
@@ -62,7 +62,7 @@ export class AttestationByTargetRepository {
   }

   private encodeKey(pubkey: BLSPubkey, targetEpoch: Epoch): Uint8Array {
-    return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(targetEpoch), uintLen, "be")]));
+    return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(targetEpoch), uintLen, "be")]));
   }

   private decodeKey(key: Uint8Array): {pubkey: BLSPubkey; targetEpoch: Epoch} {
diff --git a/packages/validator/src/slashingProtection/attestation/attestationLowerBoundRepository.ts b/packages/validator/src/slashingProtection/attestation/attestationLowerBoundRepository.ts
index d45814e6648b..84a4e7032a70 100644
--- a/packages/validator/src/slashingProtection/attestation/attestationLowerBoundRepository.ts
+++ b/packages/validator/src/slashingProtection/attestation/attestationLowerBoundRepository.ts
@@ -35,10 +35,10 @@ export class AttestationLowerBoundRepository {
   }

   async set(pubkey: BLSPubkey, value: SlashingProtectionLowerBound): Promise {
-    await this.db.put(this.encodeKey(pubkey), Buffer.from(this.type.serialize(value)), this.dbReqOpts);
+    await this.db.put(this.encodeKey(pubkey), this.type.serialize(value), this.dbReqOpts);
   }

   private encodeKey(pubkey: BLSPubkey): Uint8Array {
-    return encodeKey(this.bucket, Buffer.from(pubkey));
+    return encodeKey(this.bucket, pubkey);
   }
 }
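
The repository changes above pass `Uint8Array` values straight through: the database layer accepts them directly, and `Buffer.concat` accepts plain `Uint8Array` items, so the intermediate `Buffer.from` copies are unnecessary. A minimal sketch with a hypothetical key layout (not this repo's real schema):

  import {Buffer} from "node:buffer";

  const pubkey = new Uint8Array(48).fill(0xaa); // stand-in for a BLS pubkey
  const epochBE = Buffer.alloc(8);
  epochBE.writeBigUInt64BE(123n);

  // No Buffer.from(pubkey) wrapper needed; Buffer.concat copies both inputs into one new Buffer
  const key = Buffer.concat([pubkey, epochBE]);
  console.log(key.length); // 56
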
diff --git a/packages/validator/src/slashingProtection/block/blockBySlotRepository.ts b/packages/validator/src/slashingProtection/block/blockBySlotRepository.ts
index 25b2fee7e8e5..eb1ae092cf3f 100644
--- a/packages/validator/src/slashingProtection/block/blockBySlotRepository.ts
+++ b/packages/validator/src/slashingProtection/block/blockBySlotRepository.ts
@@ -54,7 +54,7 @@ export class BlockBySlotRepository {
     await this.db.batchPut(
       blocks.map((block) => ({
         key: this.encodeKey(pubkey, block.slot),
-        value: Buffer.from(this.type.serialize(block)),
+        value: this.type.serialize(block),
       })),
       this.dbReqOpts
     );
@@ -66,7 +66,7 @@ export class BlockBySlotRepository {
   }

   private encodeKey(pubkey: BLSPubkey, slot: Slot): Uint8Array {
-    return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(slot), uintLen, "be")]));
+    return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(slot), uintLen, "be")]));
   }

   private decodeKey(key: Uint8Array): {pubkey: BLSPubkey; slot: Slot} {
diff --git a/packages/validator/src/slashingProtection/minMaxSurround/distanceStoreRepository.ts b/packages/validator/src/slashingProtection/minMaxSurround/distanceStoreRepository.ts
index db8345a90cc2..45315bf95b03 100644
--- a/packages/validator/src/slashingProtection/minMaxSurround/distanceStoreRepository.ts
+++ b/packages/validator/src/slashingProtection/minMaxSurround/distanceStoreRepository.ts
@@ -45,13 +45,13 @@ class SpanDistanceRepository {
     await this.db.batchPut(
       values.map((value) => ({
         key: this.encodeKey(pubkey, value.source),
-        value: Buffer.from(this.type.serialize(value.distance)),
+        value: this.type.serialize(value.distance),
       })),
       this.dbReqOpts
     );
   }

   private encodeKey(pubkey: BLSPubkey, epoch: Epoch): Uint8Array {
-    return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(epoch), 8, "be")]));
+    return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(epoch), 8, "be")]));
   }
 }
diff --git a/yarn.lock b/yarn.lock
index 5ade182e9b33..2425ecd3ca85 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2836,13 +2836,6 @@
   resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-1.0.38.tgz#a81fd8606d481f873a3800c6ebae4f1d768a56a9"
   integrity sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA==

-"@types/buffer-xor@^2.0.0":
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/@types/buffer-xor/-/buffer-xor-2.0.2.tgz#d8c463583b8fbb322ea824562dc78a0c3cea2ca6"
-  integrity sha512-OqdCua7QCTupPnJgmyGJUpxWgbuOi0IMIVslXTSePS2o+qDrDB6f2Pg44zRyqhUA5GbFAf39U8z0+mH4WG0fLQ==
-  dependencies:
-    "@types/node" "*"
-
 "@types/cacheable-request@^6.0.1":
   version "6.0.3"
   resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.3.tgz#a430b3260466ca7b5ca5bfd735693b36e7a9d183"
@@ -4461,13 +4454,6 @@ buffer-xor@^1.0.3:
   resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz"
   integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=

-buffer-xor@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-2.0.2.tgz"
-  integrity sha512-eHslX0bin3GB+Lx2p7lEYRShRewuNZL3fUl4qlVJGGiwoPGftmt8JQgk2Y9Ji5/01TnVDo33E5b5O3vUB1HdqQ==
-  dependencies:
-    safe-buffer "^5.1.1"
-
 buffer@4.9.2, buffer@^4.3.0:
   version "4.9.2"
   resolved "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz"
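
Several hunks in this diff use the same three-argument `Buffer.from(view.buffer, view.byteOffset, view.byteLength)` call to obtain a `Buffer` view without copying. A hypothetical shared helper (not added by this diff) would look like the sketch below; note that passing only `view.buffer` would be wrong for views that do not start at offset 0 or do not span the whole `ArrayBuffer`:

  import {Buffer} from "node:buffer";

  // Wraps a Uint8Array (or any view) as a Buffer over the same memory, honoring its offset and length
  function toBufferView(view: Uint8Array): Buffer {
    return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
  }

  const backing = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]);
  const mid = backing.subarray(2, 5);
  console.log(toBufferView(mid)); // <Buffer 02 03 04>, same memory as `backing`
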