Skip to content

Commit

Permalink
Merge 1c687b7 into 7f63fab
Browse files Browse the repository at this point in the history
  • Loading branch information
wemeetagain authored May 24, 2024
2 parents 7f63fab + 1c687b7 commit bbf146f
Show file tree
Hide file tree
Showing 20 changed files with 91 additions and 60 deletions.
8 changes: 4 additions & 4 deletions packages/api/src/beacon/server/beacon.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
handler: async (req) => {
const response = await api.getBlock(...reqSerializers.getBlock.parseReq(req));
if (response instanceof Uint8Array) {
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
// Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
return response;
} else {
return returnTypes.getBlock.toJson(response);
}
Expand All @@ -37,8 +37,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
const slot = extractSlotFromBlockBytes(response);
const version = config.getForkName(slot);
void res.header("Eth-Consensus-Version", version);
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
// Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
return response;
} else {
void res.header("Eth-Consensus-Version", response.version);
return returnTypes.getBlockV2.toJson(response);
Expand Down
8 changes: 4 additions & 4 deletions packages/api/src/beacon/server/debug.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
handler: async (req) => {
const response = await api.getState(...reqSerializers.getState.parseReq(req));
if (response instanceof Uint8Array) {
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
// Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
return response;
} else {
return returnTypes.getState.toJson(response);
}
Expand All @@ -38,8 +38,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
const slot = extractSlotFromStateBytes(response);
const version = config.getForkName(slot);
void res.header("Eth-Consensus-Version", version);
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response.buffer, response.byteOffset, response.byteLength);
// Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
return response;
} else {
void res.header("Eth-Consensus-Version", response.version);
return returnTypes.getStateV2.toJson(response);
Expand Down
8 changes: 4 additions & 4 deletions packages/api/src/beacon/server/proof.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
for (let i = 0; i < leaves.length; i++) {
response.set(leaves[i], i * 32);
}
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
// Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
return response;
},
},
getBlockProof: {
Expand All @@ -38,8 +38,8 @@ export function getRoutes(config: ChainForkConfig, api: ServerApi<Api>): ServerR
for (let i = 0; i < leaves.length; i++) {
response.set(leaves[i], i * 32);
}
// Fastify 3.x.x will automatically add header `Content-Type: application/octet-stream` if Buffer
return Buffer.from(response);
// Fastify 4.x.x will automatically add header `Content-Type: application/octet-stream` if TypedArray
return response;
},
},
};
Expand Down
2 changes: 1 addition & 1 deletion packages/beacon-node/src/network/gossip/encoding.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export function msgIdFn(gossipTopicCache: GossipTopicCache, msg: Message): Uint8
vec = [MESSAGE_DOMAIN_VALID_SNAPPY, intToBytes(msg.topic.length, 8), Buffer.from(msg.topic), msg.data];
}

return Buffer.from(digest(Buffer.concat(vec))).subarray(0, 20);
return digest(Buffer.concat(vec)).subarray(0, 20);
}

export class DataTransformSnappy {
Expand Down
12 changes: 7 additions & 5 deletions packages/beacon-node/src/util/sszBytes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,7 @@ export function getAttDataBase64FromAttestationSerialized(data: Uint8Array): Att
}

// base64 is a bit more efficient than hex
return Buffer.from(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)).toString(
"base64"
);
return toBase64(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE));
}

/**
Expand Down Expand Up @@ -150,9 +148,9 @@ export function getAttDataBase64FromSignedAggregateAndProofSerialized(data: Uint
}

// base64 is a bit more efficient than hex
return Buffer.from(
return toBase64(
data.slice(SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET, SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE)
).toString("base64");
);
}

/**
Expand Down Expand Up @@ -206,3 +204,7 @@ function getSlotFromOffset(data: Uint8Array, offset: number): Slot {
// Read only the first 4 bytes of Slot, max value is 4,294,967,295 will be reached 1634 years after genesis
return dv.getUint32(offset, true);
}

/**
 * Encode the given bytes as a base64 string.
 *
 * Wraps the underlying ArrayBuffer in a zero-copy Buffer view (respecting the
 * array's byteOffset/byteLength so subarrays are handled correctly) rather than
 * copying the data before encoding.
 */
function toBase64(data: Uint8Array): string {
  const view = Buffer.from(data.buffer, data.byteOffset, data.byteLength);
  return view.toString("base64");
}
36 changes: 36 additions & 0 deletions packages/beacon-node/test/perf/util/bytes.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,40 @@ describe("bytes utils", function () {
}
},
});

itBench({
id: "Buffer.copy",
fn: () => {
const arr = Buffer.alloc(32 * count);
let offset = 0;
for (const b of buffers) {
b.copy(arr, offset, 0, b.length);
offset += b.length;
}
},
});

itBench({
id: "Uint8Array.set - with subarray",
fn: () => {
const arr = new Uint8Array(32 * count);
let offset = 0;
for (const b of roots) {
arr.set(b.subarray(0, b.length), offset);
offset += b.length;
}
},
});

itBench({
id: "Uint8Array.set - without subarray",
fn: () => {
const arr = new Uint8Array(32 * count);
let offset = 0;
for (const b of roots) {
arr.set(b, offset);
offset += b.length;
}
},
});
});
4 changes: 3 additions & 1 deletion packages/reqresp/src/encoders/responseEncode.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import {encodeErrorMessage} from "../utils/index.js";
import {ContextBytesType, ContextBytesFactory, MixedProtocol, Protocol, ResponseOutgoing} from "../types.js";
import {RespStatus, RpcResponseStatusError} from "../interface.js";

const SUCCESS_BUFFER = Buffer.from([RespStatus.SUCCESS]);

/**
* Yields byte chunks for a `<response>` with a zero response code `<result>`
* ```bnf
Expand All @@ -24,7 +26,7 @@ export function responseEncodeSuccess(
cbs.onChunk(chunkIndex++);

// <result>
yield Buffer.from([RespStatus.SUCCESS]);
yield SUCCESS_BUFFER;

// <context-bytes> - from altair
const contextBytes = getContextBytes(protocol.contextBytes, chunk);
Expand Down
3 changes: 2 additions & 1 deletion packages/reqresp/src/encodingStrategies/sszSnappy/encode.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ export const writeSszSnappyPayload = encodeSszSnappy as (bytes: Uint8Array) => A
*/
export async function* encodeSszSnappy(bytes: Buffer): AsyncGenerator<Buffer> {
// MUST encode the length of the raw SSZ bytes, encoded as an unsigned protobuf varint
yield Buffer.from(varintEncode(bytes.length));
const varint = varintEncode(bytes.length);
yield Buffer.from(varint.buffer, varint.byteOffset, varint.byteLength);

// By first computing and writing the SSZ byte length, the SSZ encoder can then directly
// write the chunk contents to the stream. Snappy writer compresses frame by frame
Expand Down
6 changes: 3 additions & 3 deletions packages/reqresp/src/utils/errorMessage.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import {encodeSszSnappy} from "../encodingStrategies/sszSnappy/encode.js";
import {writeSszSnappyPayload} from "../encodingStrategies/sszSnappy/encode.js";
import {Encoding} from "../types.js";

// ErrorMessage schema:
Expand All @@ -17,11 +17,11 @@ import {Encoding} from "../types.js";
*/
export async function* encodeErrorMessage(errorMessage: string, encoding: Encoding): AsyncGenerator<Buffer> {
const encoder = new TextEncoder();
const bytes = Buffer.from(encoder.encode(errorMessage).slice(0, 256));
const bytes = encoder.encode(errorMessage).slice(0, 256);

switch (encoding) {
case Encoding.SSZ_SNAPPY:
yield* encodeSszSnappy(bytes);
yield* writeSszSnappyPayload(bytes);
}
}

Expand Down
6 changes: 1 addition & 5 deletions packages/state-transition/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -68,11 +68,7 @@
"@lodestar/params": "^1.18.1",
"@lodestar/types": "^1.18.1",
"@lodestar/utils": "^1.18.1",
"bigint-buffer": "^1.1.5",
"buffer-xor": "^2.0.2"
},
"devDependencies": {
"@types/buffer-xor": "^2.0.0"
"bigint-buffer": "^1.1.5"
},
"keywords": [
"ethereum",
Expand Down
11 changes: 9 additions & 2 deletions packages/state-transition/src/block/processRandao.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import xor from "buffer-xor";
import {digest} from "@chainsafe/as-sha256";
import {allForks} from "@lodestar/types";
import {EPOCHS_PER_HISTORICAL_VECTOR} from "@lodestar/params";
Expand Down Expand Up @@ -28,6 +27,14 @@ export function processRandao(
}

// mix in RANDAO reveal
const randaoMix = xor(Buffer.from(getRandaoMix(state, epoch)), Buffer.from(digest(randaoReveal)));
const randaoMix = xor(getRandaoMix(state, epoch), digest(randaoReveal));
state.randaoMixes.set(epoch % EPOCHS_PER_HISTORICAL_VECTOR, randaoMix);
}

/**
 * Byte-wise XOR of `a` and `b` over their overlapping prefix.
 *
 * Returns a new Uint8Array of length `a.length`; bytes beyond `b.length` are
 * copied from `a` unchanged. Unlike an in-place xor, this does not mutate `a`,
 * which may alias cached state (the caller passes the mix returned by
 * getRandaoMix — NOTE(review): confirm whether that is a copy or a live view).
 */
function xor(a: Uint8Array, b: Uint8Array): Uint8Array {
  // Copy `a` first so untouched trailing bytes are already in place.
  const out = new Uint8Array(a);
  const length = Math.min(a.length, b.length);
  for (let i = 0; i < length; i++) {
    out[i] = a[i] ^ b[i];
  }
  return out;
}
2 changes: 1 addition & 1 deletion packages/state-transition/src/cache/pubkeyCache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ function toMemoryEfficientHexStr(hex: Uint8Array | string): string {
return hex;
}

return Buffer.from(hex).toString("hex");
return Buffer.from(hex.buffer, hex.byteOffset, hex.byteLength).toString("hex");
}

export class PubkeyIndexMap {
Expand Down
9 changes: 5 additions & 4 deletions packages/state-transition/src/util/shuffle.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,8 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void
const listSize = input.length >>> 0;
// check if list size fits in uint32
assert.equal(listSize, input.length, "input length does not fit uint32");
// check that the seed is at most 32 bytes
assert.lte(seed.length, _SHUFFLE_H_SEED_SIZE, `seed length is not lte ${_SHUFFLE_H_SEED_SIZE} bytes`);

const buf = Buffer.alloc(_SHUFFLE_H_TOTAL_SIZE);
let r = 0;
Expand All @@ -100,8 +102,7 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void
}

// Seed is always the first 32 bytes of the hash input, we never have to change this part of the buffer.
const _seed = seed;
Buffer.from(_seed).copy(buf, 0, 0, _SHUFFLE_H_SEED_SIZE);
buf.set(seed, 0);

// initial values here are not used: overwritten first within the inner for loop.
let source = seed; // just setting it to a Bytes32
Expand All @@ -114,8 +115,8 @@ function innerShuffleList(input: Shuffleable, seed: Bytes32, dir: boolean): void
buf[_SHUFFLE_H_SEED_SIZE] = r;
// Seed is already in place, now just hash the correct part of the buffer, and take a uint64 from it,
// and modulo it to get a pivot within range.
const h = digest(buf.slice(0, _SHUFFLE_H_PIVOT_VIEW_SIZE));
const pivot = Number(bytesToBigInt(h.slice(0, 8)) % BigInt(listSize)) >>> 0;
const h = digest(buf.subarray(0, _SHUFFLE_H_PIVOT_VIEW_SIZE));
const pivot = Number(bytesToBigInt(h.subarray(0, 8)) % BigInt(listSize)) >>> 0;

// Split up the for-loop in two:
// 1. Handle the part from 0 (incl) to pivot (incl). This is mirrored around (pivot / 2)
Expand Down
2 changes: 1 addition & 1 deletion packages/utils/src/verifyMerkleBranch.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,5 @@ export function verifyMerkleBranch(
value = digest64(Buffer.concat([value, proof[i]]));
}
}
return Buffer.from(value).equals(root);
return Buffer.from(value.buffer, value.byteOffset, value.byteLength).equals(root);
}
4 changes: 2 additions & 2 deletions packages/validator/src/repositories/metaDataRepository.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ export class MetaDataRepository {
}

async setGenesisValidatorsRoot(genesisValidatorsRoot: Root): Promise<void> {
await this.db.put(this.encodeKey(GENESIS_VALIDATORS_ROOT), Buffer.from(genesisValidatorsRoot), this.dbReqOpts);
await this.db.put(this.encodeKey(GENESIS_VALIDATORS_ROOT), genesisValidatorsRoot, this.dbReqOpts);
}

async getGenesisTime(): Promise<UintNum64 | null> {
Expand All @@ -34,7 +34,7 @@ export class MetaDataRepository {
}

async setGenesisTime(genesisTime: UintNum64): Promise<void> {
await this.db.put(this.encodeKey(GENESIS_TIME), Buffer.from(ssz.UintNum64.serialize(genesisTime)), this.dbReqOpts);
await this.db.put(this.encodeKey(GENESIS_TIME), ssz.UintNum64.serialize(genesisTime), this.dbReqOpts);
}

private encodeKey(key: Uint8Array): Uint8Array {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ export class AttestationByTargetRepository {
await this.db.batchPut(
atts.map((att) => ({
key: this.encodeKey(pubkey, att.targetEpoch),
value: Buffer.from(this.type.serialize(att)),
value: this.type.serialize(att),
})),
this.dbReqOpts
);
Expand All @@ -62,7 +62,7 @@ export class AttestationByTargetRepository {
}

private encodeKey(pubkey: BLSPubkey, targetEpoch: Epoch): Uint8Array {
return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(targetEpoch), uintLen, "be")]));
return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(targetEpoch), uintLen, "be")]));
}

private decodeKey(key: Uint8Array): {pubkey: BLSPubkey; targetEpoch: Epoch} {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,10 @@ export class AttestationLowerBoundRepository {
}

async set(pubkey: BLSPubkey, value: SlashingProtectionLowerBound): Promise<void> {
await this.db.put(this.encodeKey(pubkey), Buffer.from(this.type.serialize(value)), this.dbReqOpts);
await this.db.put(this.encodeKey(pubkey), this.type.serialize(value), this.dbReqOpts);
}

private encodeKey(pubkey: BLSPubkey): Uint8Array {
return encodeKey(this.bucket, Buffer.from(pubkey));
return encodeKey(this.bucket, pubkey);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ export class BlockBySlotRepository {
await this.db.batchPut(
blocks.map((block) => ({
key: this.encodeKey(pubkey, block.slot),
value: Buffer.from(this.type.serialize(block)),
value: this.type.serialize(block),
})),
this.dbReqOpts
);
Expand All @@ -66,7 +66,7 @@ export class BlockBySlotRepository {
}

private encodeKey(pubkey: BLSPubkey, slot: Slot): Uint8Array {
return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(slot), uintLen, "be")]));
return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(slot), uintLen, "be")]));
}

private decodeKey(key: Uint8Array): {pubkey: BLSPubkey; slot: Slot} {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,13 @@ class SpanDistanceRepository {
await this.db.batchPut(
values.map((value) => ({
key: this.encodeKey(pubkey, value.source),
value: Buffer.from(this.type.serialize(value.distance)),
value: this.type.serialize(value.distance),
})),
this.dbReqOpts
);
}

private encodeKey(pubkey: BLSPubkey, epoch: Epoch): Uint8Array {
return encodeKey(this.bucket, Buffer.concat([Buffer.from(pubkey), intToBytes(BigInt(epoch), 8, "be")]));
return encodeKey(this.bucket, Buffer.concat([pubkey, intToBytes(BigInt(epoch), 8, "be")]));
}
}
14 changes: 0 additions & 14 deletions yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -2836,13 +2836,6 @@
resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-1.0.38.tgz#a81fd8606d481f873a3800c6ebae4f1d768a56a9"
integrity sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA==

"@types/buffer-xor@^2.0.0":
version "2.0.2"
resolved "https://registry.yarnpkg.com/@types/buffer-xor/-/buffer-xor-2.0.2.tgz#d8c463583b8fbb322ea824562dc78a0c3cea2ca6"
integrity sha512-OqdCua7QCTupPnJgmyGJUpxWgbuOi0IMIVslXTSePS2o+qDrDB6f2Pg44zRyqhUA5GbFAf39U8z0+mH4WG0fLQ==
dependencies:
"@types/node" "*"

"@types/cacheable-request@^6.0.1":
version "6.0.3"
resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.3.tgz#a430b3260466ca7b5ca5bfd735693b36e7a9d183"
Expand Down Expand Up @@ -4461,13 +4454,6 @@ buffer-xor@^1.0.3:
resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz"
integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=

buffer-xor@^2.0.2:
version "2.0.2"
resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-2.0.2.tgz"
integrity sha512-eHslX0bin3GB+Lx2p7lEYRShRewuNZL3fUl4qlVJGGiwoPGftmt8JQgk2Y9Ji5/01TnVDo33E5b5O3vUB1HdqQ==
dependencies:
safe-buffer "^5.1.1"

[email protected], buffer@^4.3.0:
version "4.9.2"
resolved "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz"
Expand Down

0 comments on commit bbf146f

Please sign in to comment.