fix: avoid Buffer.from copies #6723

Merged: 10 commits, May 24, 2024
4 changes: 2 additions & 2 deletions packages/api/src/beacon/server/beacon.ts
@@ -23,7 +23,7 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
const response = await api.getBlock(...reqSerializers.getBlock.parseReq(req));
if (response instanceof Uint8Array) {
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
} else {
return returnTypes.getBlock.toJson(response);
}
@@ -38,7 +38,7 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
const version = config.getForkName(slot);
void res.header("Eth-Consensus-Version", version);
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
} else {
void res.header("Eth-Consensus-Version", response.version);
return returnTypes.getBlockV2.toJson(response);
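For context on the change above: `Buffer.from(uint8Array)` allocates a new Buffer and copies the bytes, while the three-argument form `Buffer.from(arrayBuffer, byteOffset, byteLength)` creates a Buffer view over the same memory. A minimal sketch of the difference (the `toBuffer` helper name here is only illustrative):

```ts
// Wrap a Uint8Array as a Buffer without copying its bytes.
function toBuffer(view: Uint8Array): Buffer {
  // Buffer.from(view) would copy; this form shares the underlying ArrayBuffer.
  return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
}

const original = new Uint8Array([1, 2, 3, 4]);
const copied = Buffer.from(original);   // independent allocation
const wrapped = toBuffer(original);     // view over the same memory

original[0] = 9;
console.log(copied[0]);  // 1 - the copy is unaffected
console.log(wrapped[0]); // 9 - the view observes the mutation
```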
2 changes: 1 addition & 1 deletion packages/api/src/beacon/server/debug.ts
@@ -24,7 +24,7 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
const response = await api.getState(...reqSerializers.getState.parseReq(req));
if (response instanceof Uint8Array) {
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
} else {
return returnTypes.getState.toJson(response);
}
4 changes: 2 additions & 2 deletions packages/api/src/beacon/server/proof.ts
@@ -25,7 +25,7 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
response.set(leaves[i], i * 32);
}
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
},
},
getBlockProof: {
@@ -39,7 +39,7 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
response.set(leaves[i], i * 32);
}
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
},
},
};
2 changes: 1 addition & 1 deletion packages/beacon-node/src/network/gossip/encoding.ts
@@ -58,7 +58,7 @@ export function msgIdFn(gossipTopicCache: GossipTopicCache, msg: Message): Uint8
vec = [MESSAGE_DOMAIN_VALID_SNAPPY, intToBytes(msg.topic.length, 8), Buffer.from(msg.topic), msg.data];
}

return Buffer.from(digest(Buffer.concat(vec))).subarray(0, 20);
return digest(Buffer.concat(vec)).subarray(0, 20);
}

export class DataTransformSnappy {
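Here `digest` already returns a `Uint8Array`, and `Uint8Array.prototype.subarray` returns a view rather than a copy, so the previous `Buffer.from(...)` wrap only added an allocation before truncating to 20 bytes. A small sketch of the `subarray` vs `slice` distinction:

```ts
const hash = new Uint8Array(32).fill(0xab); // stand-in for a 32-byte digest
const view = hash.subarray(0, 20);          // shares memory with `hash`
const copy = hash.slice(0, 20);             // independent copy

hash[0] = 0x01;
console.log(view[0]); // 0x01 - the view reflects the change
console.log(copy[0]); // 0xab - the copy does not
```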
10 changes: 6 additions & 4 deletions packages/beacon-node/src/util/sszBytes.ts
@@ -25,6 +25,10 @@ const SLOT_SIZE = 8;
const ATTESTATION_DATA_SIZE = 128;
const SIGNATURE_SIZE = 96;

function toBuffer(x: Uint8Array): Buffer {
return Buffer.from(x.buffer, x.byteOffset, x.byteLength);
}

/**
* Extract slot from attestation serialized bytes.
* Return null if data is not long enough to extract slot.
@@ -59,9 +63,7 @@ export function getAttDataBase64FromAttestationSerialized(data: Uint8Array): Att
}

// base64 is a bit more efficient than hex
return Buffer.from(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)).toString(
"base64"
);
return toBuffer(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)).toString("base64");
}

/**
@@ -150,7 +152,7 @@ export function getAttDataBase64FromSignedAggregateAndProofSerialized(data: Uint
}

// base64 is a bit more efficient than hex
return Buffer.from(
return toBuffer(
data.slice(SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET, SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE)
).toString("base64");
}
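In both helpers above, `data.slice(...)` still makes one copy of the attestation-data bytes; the new `toBuffer` only removes the second copy that `Buffer.from(uint8Array)` would have added before the base64 conversion. A hedged sketch of the pattern, with illustrative offsets rather than the real SSZ layout:

```ts
const FIELD_OFFSET = 4;  // illustrative, not the actual SSZ offsets
const FIELD_SIZE = 128;

function fieldToBase64(data: Uint8Array): string | null {
  if (data.length < FIELD_OFFSET + FIELD_SIZE) {
    return null;
  }
  const field = data.slice(FIELD_OFFSET, FIELD_OFFSET + FIELD_SIZE); // one copy
  // Wrap the copy as a Buffer view (no second copy) to reuse Buffer's base64 encoder.
  return Buffer.from(field.buffer, field.byteOffset, field.byteLength).toString("base64");
}
```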
4 changes: 3 additions & 1 deletion packages/reqresp/src/encoders/responseEncode.ts
@@ -3,6 +3,8 @@ import {encodeErrorMessage} from "../utils/index.js";
import {ContextBytesType, ContextBytesFactory, MixedProtocol, Protocol, ResponseOutgoing} from "../types.js";
import {RespStatus, RpcResponseStatusError} from "../interface.js";

const SUCCESS_BUFFER = Buffer.from([RespStatus.SUCCESS]);

/**
* Yields byte chunks for a `<response>` with a zero response code `<result>`
* ```bnf
@@ -24,7 +26,7 @@ export function responseEncodeSuccess(
cbs.onChunk(chunkIndex++);

// <result>
yield Buffer.from([RespStatus.SUCCESS]);
yield SUCCESS_BUFFER;

// <context-bytes> - from altair
const contextBytes = getContextBytes(protocol.contextBytes, chunk);
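Hoisting the one-byte `<result>` prefix to a module-level constant means the generator yields the same pre-allocated Buffer for every chunk rather than allocating a fresh one per response. A sketch of the pattern, with illustrative names and a stand-in status byte:

```ts
// Allocated once at module load; safe to reuse as long as consumers never mutate yielded chunks.
const SUCCESS = Buffer.from([0x00]);

async function* encodeChunks(payloads: Uint8Array[]): AsyncGenerator<Buffer> {
  for (const payload of payloads) {
    yield SUCCESS; // reused constant, no per-chunk allocation
    yield Buffer.from(payload.buffer, payload.byteOffset, payload.byteLength); // zero-copy view
  }
}
```

The reuse relies on stream writers treating yielded buffers as read-only, which is the usual contract for this kind of encoder.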
@@ -15,7 +15,8 @@ export const writeSszSnappyPayload = encodeSszSnappy as (bytes: Uint8Array) => A
*/
export async function* encodeSszSnappy(bytes: Buffer): AsyncGenerator<Buffer> {
// MUST encode the length of the raw SSZ bytes, encoded as an unsigned protobuf varint
yield Buffer.from(varintEncode(bytes.length));
const varint = varintEncode(bytes.length);
yield Buffer.from(varint.buffer, varint.byteOffset, varint.byteLength);

// By first computing and writing the SSZ byte length, the SSZ encoder can then directly
// write the chunk contents to the stream. Snappy writer compresses frame by frame
6 changes: 3 additions & 3 deletions packages/reqresp/src/utils/errorMessage.ts
@@ -1,4 +1,4 @@
import {encodeSszSnappy} from "../encodingStrategies/sszSnappy/encode.js";
import {writeSszSnappyPayload} from "../encodingStrategies/sszSnappy/encode.js";
import {Encoding} from "../types.js";

// ErrorMessage schema:
@@ -17,11 +17,11 @@ import {Encoding} from "../types.js";
*/
export async function* encodeErrorMessage(errorMessage: string, encoding: Encoding): AsyncGenerator<Buffer> {
const encoder = new TextEncoder();
const bytes = Buffer.from(encoder.encode(errorMessage).slice(0, 256));
const bytes = encoder.encode(errorMessage).slice(0, 256);

switch (encoding) {
case Encoding.SSZ_SNAPPY:
yield* encodeSszSnappy(bytes);
yield* writeSszSnappyPayload(bytes);
}
}

7 changes: 2 additions & 5 deletions packages/state-transition/package.json
@@ -68,12 +68,9 @@
"@lodestar/params": "^1.18.0",
"@lodestar/types": "^1.18.0",
"@lodestar/utils": "^1.18.0",
"bigint-buffer": "^1.1.5",
"buffer-xor": "^2.0.2"
},
"devDependencies": {
"@types/buffer-xor": "^2.0.0"
"bigint-buffer": "^1.1.5"
},
"devDependencies": {},
"keywords": [
"ethereum",
"eth-consensus",
11 changes: 9 additions & 2 deletions packages/state-transition/src/block/processRandao.ts
@@ -1,4 +1,3 @@
import xor from "buffer-xor";
import {digest} from "@chainsafe/as-sha256";
import {allForks} from "@lodestar/types";
import {EPOCHS_PER_HISTORICAL_VECTOR} from "@lodestar/params";
@@ -28,6 +27,14 @@ export function processRandao(
}

// mix in RANDAO reveal
const randaoMix = xor(Buffer.from(getRandaoMix(state, epoch)), Buffer.from(digest(randaoReveal)));
const randaoMix = xor(getRandaoMix(state, epoch), digest(randaoReveal));
state.randaoMixes.set(epoch % EPOCHS_PER_HISTORICAL_VECTOR, randaoMix);
}

function xor(a: Uint8Array, b: Uint8Array): Uint8Array {
const length = Math.min(a.length, b.length);
for (let i = 0; i < length; i++) {
a[i] = a[i] ^ b[i];
}
return a;
}
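The local `xor` helper replaces the `buffer-xor` dependency and works in place: it mutates its first argument over the overlapping length and returns it, so no result array is allocated. A small usage sketch:

```ts
function xor(a: Uint8Array, b: Uint8Array): Uint8Array {
  const length = Math.min(a.length, b.length);
  for (let i = 0; i < length; i++) {
    a[i] = a[i] ^ b[i];
  }
  return a;
}

const mix = new Uint8Array([0b1010, 0b1100]);
const reveal = new Uint8Array([0b0110, 0b1010]);
const out = xor(mix, reveal);
console.log(out === mix);     // true - mutated in place, no new allocation
console.log(Array.from(out)); // [12, 6]
```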
2 changes: 1 addition & 1 deletion packages/state-transition/src/cache/pubkeyCache.ts
@@ -23,7 +23,7 @@ function toMemoryEfficientHexStr(hex: Uint8Array | string): string {
return hex;
}

return Buffer.from(hex).toString("hex");
return Buffer.from(hex.buffer, hex.byteOffset, hex.byteLength).toString("hex");
}

export class PubkeyIndexMap {
2 changes: 1 addition & 1 deletion packages/state-transition/src/util/shuffle.ts
@@ -101,7 +101,7 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void

// Seed is always the first 32 bytes of the hash input, we never have to change this part of the buffer.
const _seed = seed;
Buffer.from(_seed).copy(buf, 0, 0, _SHUFFLE_H_SEED_SIZE);
buf.set(_seed.subarray(0, _SHUFFLE_H_SEED_SIZE), 0);

// initial values here are not used: overwritten first within the inner for loop.
let source = seed; // just setting it to a Bytes32
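`TypedArray.prototype.set` copies the source bytes straight into the destination at the given offset, so the previous `Buffer.from(_seed)` intermediate (one extra allocation and copy) is unnecessary. A minimal equivalence sketch with an illustrative seed size:

```ts
const SEED_SIZE = 32; // illustrative stand-in for _SHUFFLE_H_SEED_SIZE
const seed = new Uint8Array(32).fill(7);
const buf = new Uint8Array(64);

// Single copy from `seed` into the front of `buf`; no intermediate Buffer is created.
buf.set(seed.subarray(0, SEED_SIZE), 0);

console.log(buf[0], buf[31], buf[32]); // 7 7 0
```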
4 changes: 2 additions & 2 deletions packages/validator/src/repositories/metaDataRepository.ts
@@ -25,7 +25,7 @@ export class MetaDataRepository {
}

async setGenesisValidatorsRoot(genesisValidatorsRoot: Root): Promise<void> {
await this.db.put(this.encodeKey(GENESIS_VALIDATORS_ROOT), Buffer.from(genesisValidatorsRoot), this.dbReqOpts);
await this.db.put(this.encodeKey(GENESIS_VALIDATORS_ROOT), genesisValidatorsRoot, this.dbReqOpts);
}

async getGenesisTime(): Promise<UintNum64 | null> {
@@ -34,7 +34,7 @@
}

async setGenesisTime(genesisTime: UintNum64): Promise<void> {
await this.db.put(this.encodeKey(GENESIS_TIME), Buffer.from(ssz.UintNum64.serialize(genesisTime)), this.dbReqOpts);
await this.db.put(this.encodeKey(GENESIS_TIME), ssz.UintNum64.serialize(genesisTime), this.dbReqOpts);
}

private encodeKey(key: Uint8Array): Uint8Array {
@@ -50,7 +50,7 @@ export class AttestationByTargetRepository {
await this.db.batchPut(
atts.map((att) => ({
key: this.encodeKey(pubkey, att.targetEpoch),
value: Buffer.from(this.type.serialize(att)),
value: this.type.serialize(att),
})),
this.dbReqOpts
);
@@ -62,7 +62,7 @@
}

private encodeKey(pubkey: BLSPubkey, targetEpoch: Epoch): Uint8Array {
return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(targetEpoch), uintLen, "be")]));
return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(targetEpoch), uintLen, "be")]));
}

private decodeKey(key: Uint8Array): {pubkey: BLSPubkey; targetEpoch: Epoch} {
@@ -35,10 +35,10 @@
}

async set(pubkey: BLSPubkey, value: SlashingProtectionLowerBound): Promise<void> {
await this.db.put(this.encodeKey(pubkey), Buffer.from(this.type.serialize(value)), this.dbReqOpts);
await this.db.put(this.encodeKey(pubkey), this.type.serialize(value), this.dbReqOpts);
}

private encodeKey(pubkey: BLSPubkey): Uint8Array {
return encodeKey(this.bucket, Buffer.from(pubkey));
return encodeKey(this.bucket, pubkey);
}
}
@@ -54,7 +54,7 @@ export class BlockBySlotRepository {
await this.db.batchPut(
blocks.map((block) => ({
key: this.encodeKey(pubkey, block.slot),
value: Buffer.from(this.type.serialize(block)),
value: this.type.serialize(block),
})),
this.dbReqOpts
);
@@ -66,7 +66,7 @@
}

private encodeKey(pubkey: BLSPubkey, slot: Slot): Uint8Array {
return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(slot), uintLen, "be")]));
return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(slot), uintLen, "be")]));
}

private decodeKey(key: Uint8Array): {pubkey: BLSPubkey; slot: Slot} {
@@ -45,13 +45,13 @@ class SpanDistanceRepository {
await this.db.batchPut(
values.map((value) => ({
key: this.encodeKey(pubkey, value.source),
value: Buffer.from(this.type.serialize(value.distance)),
value: this.type.serialize(value.distance),
})),
this.dbReqOpts
);
}

private encodeKey(pubkey: BLSPubkey, epoch: Epoch): Uint8Array {
return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(epoch), 8, "be")]));
return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(epoch), 8, "be")]));
}
}
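Across the validator repositories the pattern is the same: `type.serialize(...)` already returns a `Uint8Array`, and the database layer accepts `Uint8Array` keys and values, so the `Buffer.from` wrappers only added one copy per write. A hedged sketch of the pattern, with a stand-in `Db` interface rather than the real database API:

```ts
// Stand-in interfaces for illustration only.
interface Db {
  put(key: Uint8Array, value: Uint8Array): Promise<void>;
}

interface Serializer<T> {
  serialize(value: T): Uint8Array;
}

// Write a serialized record without the extra Buffer.from copy.
async function putRecord<T>(db: Db, key: Uint8Array, type: Serializer<T>, value: T): Promise<void> {
  await db.put(key, type.serialize(value)); // previously: Buffer.from(type.serialize(value))
}
```

`Buffer.concat` also accepts plain `Uint8Array` items, which is why the key helpers no longer need `Buffer.from(pubkey)` before concatenation.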
14 changes: 0 additions & 14 deletions yarn.lock
@@ -2836,13 +2836,6 @@
resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-1.0.38.tgz#a81fd8606d481f873a3800c6ebae4f1d768a56a9"
integrity sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA==

"@types/buffer-xor@^2.0.0":
version "2.0.2"
resolved "https://registry.yarnpkg.com/@types/buffer-xor/-/buffer-xor-2.0.2.tgz#d8c463583b8fbb322ea824562dc78a0c3cea2ca6"
integrity sha512-OqdCua7QCTupPnJgmyGJUpxWgbuOi0IMIVslXTSePS2o+qDrDB6f2Pg44zRyqhUA5GbFAf39U8z0+mH4WG0fLQ==
dependencies:
"@types/node" "*"

"@types/cacheable-request@^6.0.1":
version "6.0.3"
resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.3.tgz#a430b3260466ca7b5ca5bfd735693b36e7a9d183"
@@ -4461,13 +4454,6 @@ buffer-xor@^1.0.3:
resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz"
integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=

buffer-xor@^2.0.2:
version "2.0.2"
resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-2.0.2.tgz"
integrity sha512-eHslX0bin3GB+Lx2p7lEYRShRewuNZL3fUl4qlVJGGiwoPGftmt8JQgk2Y9Ji5/01TnVDo33E5b5O3vUB1HdqQ==
dependencies:
safe-buffer "^5.1.1"

buffer@4.9.2, buffer@^4.3.0:
version "4.9.2"
resolved "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz"