diff --git a/bench/blst.bench.ts b/bench/blst.bench.ts new file mode 100644 index 0000000..5903339 --- /dev/null +++ b/bench/blst.bench.ts @@ -0,0 +1,107 @@ +import {describe, bench} from "@chainsafe/benchmark"; + +import * as blst from "../src/blst.ts"; +import * as other from "@chainsafe/blst"; + +describe("blst", () => { + const ikm = new Uint8Array(32); + bench("blst - keygen", () => { + blst.SecretKey.fromKeygen(ikm); + }); + bench("other - keygen", () => { + other.SecretKey.fromKeygen(ikm); + }); + + bench({ + id: "blst - sign", + beforeEach: () => { + const sk = blst.SecretKey.fromKeygen(ikm); + return {sk, msg: new Uint8Array(32)}; + }, + fn: ({sk, msg}) => { + sk.sign(msg); + }, + }); + bench({ + id: "other - sign", + beforeEach: () => { + const sk = other.SecretKey.fromKeygen(ikm); + return {sk, msg: new Uint8Array(32)}; + }, + fn: ({sk, msg}) => { + sk.sign(msg); + }, + }); + + bench({ + id: "blst - verify", + beforeEach: () => { + const sk = blst.SecretKey.fromKeygen(ikm); + const pk = sk.toPublicKey(); + const msg = new Uint8Array(32); + const sig = sk.sign(msg); + return {pk, msg, sig}; + }, + fn: ({pk, msg, sig}) => { + sig.verify(msg, pk, true, true); + }, + }); + bench({ + id: "other - verify", + beforeEach: () => { + const sk = other.SecretKey.fromKeygen(ikm); + const pk = sk.toPublicKey(); + const msg = new Uint8Array(32); + const sig = sk.sign(msg); + return {pk, msg, sig}; + }, + fn: ({pk, msg, sig}) => { + other.verify(msg, pk, sig, true, true); + }, + }); + + bench({ + id: "blst - aggregateVerify 16", + beforeEach: () => { + const sks = []; + const pks = []; + const sigs = []; + const msgs = []; + for (let i = 0; i < 16; i++) { + const sk = blst.SecretKey.fromKeygen(ikm); + sks.push(sk); + pks.push(sk.toPublicKey()); + const msg = new Uint8Array(32); + msgs.push(msg); + sigs.push(sk.sign(msg)); + } + const aggSig = blst.aggregateSignatures(sigs, true); + return {pks, msgs, aggSig}; + }, + fn: ({pks, msgs, aggSig}) => { + 
aggSig.aggregateVerify(msgs, pks, true, true); + }, + }); + bench({ + id: "other - aggregateVerify 16", + beforeEach: () => { + const sks = []; + const pks = []; + const sigs = []; + const msgs = []; + for (let i = 0; i < 16; i++) { + const sk = other.SecretKey.fromKeygen(ikm); + sks.push(sk); + pks.push(sk.toPublicKey()); + const msg = new Uint8Array(32); + msgs.push(msg); + sigs.push(sk.sign(msg)); + } + const aggSig = other.aggregateSignatures(sigs, true); + return {pks, msgs, aggSig}; + }, + fn: ({pks, msgs, aggSig}) => { + other.aggregateVerify(msgs, pks, aggSig, true, true); + }, + }); +}); diff --git a/build.zig b/build.zig index 8c541ce..a151443 100644 --- a/build.zig +++ b/build.zig @@ -6,6 +6,8 @@ pub fn build(b: *std.Build) void { const target = b.standardTargetOptions(.{}); const optimize = b.standardOptimizeOption(.{}); + const dep_blst = b.dependency("blst", .{}); + const dep_hashtree = b.dependency("hashtree", .{}); const dep_leveldb = b.dependency("leveldb", .{}); @@ -53,5 +55,6 @@ pub fn build(b: *std.Build) void { module_lodestar_z_bun.addImport("hashtree", dep_hashtree.module("hashtree")); module_lodestar_z_bun.addImport("lmdb", dep_lmdb.module("lmdb")); module_lodestar_z_bun.addImport("leveldb", dep_leveldb.module("leveldb")); + module_lodestar_z_bun.addImport("blst", dep_blst.module("blst")); module_lodestar_z_bun.addImport("ssz:persistent_merkle_tree", dep_ssz.module("persistent_merkle_tree")); } diff --git a/build.zig.zon b/build.zig.zon index d274a64..6b7db93 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -6,6 +6,10 @@ .fingerprint = 0xed854bdae2354180, .minimum_zig_version = "0.14.1", .dependencies = .{ + .blst = .{ + .url = "git+https://github.com/Chainsafe/blst-z#ea0ae943083ba24b6a3d60051f971d28d7ed560f", + .hash = "blst_z-0.0.0-td3FNAHEAACXKlQ_b8Dw6-4-_2zN1sxo5PcDIuZgnaMw", + }, .hashtree = .{ .url = "git+https://github.com/chainsafe/hashtree-z#43a58b0fd4813515cda3d0ffc622125243a01c54", .hash = 
"hashtree-0.1.0-sBOovrYSAAArAQVa-a6BhOPWPTrgKrJtufxWjQYMNNAN", diff --git a/bun.lock b/bun.lock index 7756b33..218729c 100644 --- a/bun.lock +++ b/bun.lock @@ -9,6 +9,7 @@ "devDependencies": { "@chainsafe/as-sha256": "^1.2.0", "@chainsafe/benchmark": "^2.0.1", + "@chainsafe/blst": "^2.2.0", "@chainsafe/hashtree": "^1.0.2", "@types/bun": "latest", "bigint-buffer": "^1.1.5", @@ -65,6 +66,22 @@ "@chainsafe/benchmark": ["@chainsafe/benchmark@2.0.1", "", { "dependencies": { "@actions/cache": "^4.0.0", "@actions/github": "^6.0.0", "@vitest/runner": "^2.1.8", "ajv": "^8.17.1", "aws-sdk": "^2.932.0", "cli-table3": "^0.6.5", "csv-parse": "^5.6.0", "csv-stringify": "^6.5.2", "debug": "^4.4.0", "glob": "^10.4.5", "log-symbols": "^7.0.0", "yaml": "^2.7.0", "yargs": "^17.7.2" }, "bin": { "benchmark": "bin/index.js" } }, "sha512-Noecu9z6kjXWdKl9ZL/PckJxfi+Ax4/8/i4F862jo3FZcViK8LWR5Byc8pKeNC5vcDMSP73/ME3vgUovYGqwUw=="], + "@chainsafe/blst": ["@chainsafe/blst@2.2.0", "", { "optionalDependencies": { "@chainsafe/blst-darwin-arm64": "2.2.0", "@chainsafe/blst-darwin-x64": "2.2.0", "@chainsafe/blst-linux-arm64-gnu": "2.2.0", "@chainsafe/blst-linux-arm64-musl": "2.2.0", "@chainsafe/blst-linux-x64-gnu": "2.2.0", "@chainsafe/blst-linux-x64-musl": "2.2.0", "@chainsafe/blst-win32-x64-msvc": "2.2.0" } }, "sha512-VBaQoNE2a9d9+skAjQKv3Suk0yGKqp3mZM0YWYJNPj/Ae/f6lAyeVSgKqo2LrsNQBzD/LqrJLKUY8rJT3vDKLA=="], + + "@chainsafe/blst-darwin-arm64": ["@chainsafe/blst-darwin-arm64@2.2.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-BOOy2KHbV028cioPWaAMqHdLRKd6/3XyEmUEcQC2E/SpyYLdNcaKiBUYIU4pT9CrWBbJJxX68UI+3vZVg0M8/w=="], + + "@chainsafe/blst-darwin-x64": ["@chainsafe/blst-darwin-x64@2.2.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-jG64cwIdPT7u/haRrW26tWCpfMfHBQCfGY169mFQifCwO4VEwvaiVBPOh5olFis6LjpcmD+O0jpM8GqrnsmUHQ=="], + + "@chainsafe/blst-linux-arm64-gnu": ["@chainsafe/blst-linux-arm64-gnu@2.2.0", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-L8xV2uuLn8we76vdzfryS9ePdheuZrmY6yArGUFaF1Uzcwml6V1/VvyPl9/uooo/YfVRIrvF/D+lQfI2GFAnhw=="], + + "@chainsafe/blst-linux-arm64-musl": ["@chainsafe/blst-linux-arm64-musl@2.2.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-0Vn0luxLYVgC3lvWT1MapFHSAoz99PldqjhilXTGv0AcAk/X5LXPH2RC9Dp2KJGqthyUkpbk1j47jUBfBI+BIg=="], + + "@chainsafe/blst-linux-x64-gnu": ["@chainsafe/blst-linux-x64-gnu@2.2.0", "", { "os": "linux", "cpu": "x64" }, "sha512-gEY/z2SDBA7kXtFEI9VNhWTJAIjx16jdeAyCaS2k4ACGurWZaWk+Ee4KniTsr4WieSqeuNTUr7Pdja0Sr4EKNQ=="], + + "@chainsafe/blst-linux-x64-musl": ["@chainsafe/blst-linux-x64-musl@2.2.0", "", { "os": "linux", "cpu": "x64" }, "sha512-58GKtiUmtVSuerRzPEcMNQZpICPboBKFnL7+1Wo+PSuajkvbae7tEFrFTtWeMoKIPgOEsPMnk96LF+0yNgavUg=="], + + "@chainsafe/blst-win32-x64-msvc": ["@chainsafe/blst-win32-x64-msvc@2.2.0", "", { "os": "win32", "cpu": "x64" }, "sha512-UFrZshl4dfX5Uh2zeKXAZtrkQ+otczHMON2tsrapQNICWmfHZrzE6pKuBL+9QeGAbgflwpbz7+D5nQRDpiuHxQ=="], + "@chainsafe/bun-ffi-z": ["@chainsafe/bun-ffi-z@1.1.4", "", { "peerDependencies": { "typescript": "^5" }, "bin": { "bun-ffi-z": "src/cli.ts" } }, "sha512-yegYWjcj9pMwrf554iuFGoUc+Kq8EWDrVpeGiPl/nmXu/lm5EaUzxsiLG6WR1J9tpTQk012213y9RA5PwDrUTQ=="], "@chainsafe/hashtree": ["@chainsafe/hashtree@1.0.2", "", { "optionalDependencies": { "@chainsafe/hashtree-darwin-arm64": "1.0.2", "@chainsafe/hashtree-linux-arm64-gnu": "1.0.2", "@chainsafe/hashtree-linux-arm64-musl": "1.0.2", "@chainsafe/hashtree-linux-x64-gnu": "1.0.2", "@chainsafe/hashtree-linux-x64-musl": "1.0.2", "@chainsafe/hashtree-win32-x64-msvc": "1.0.2" } }, "sha512-OaWjsZ6S/GaT2RvaqdpsF5Mux8qQOE2KbitX2yHmQJZNUZkdh7C3N4PA5LsvewqX+z8Nkv8mr1uSe0LSrHGiQw=="], diff --git a/package.json b/package.json index a5a79c2..99c6e7c 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,7 @@ "devDependencies": { "@chainsafe/as-sha256": "^1.2.0", "@chainsafe/benchmark": "^2.0.1", + "@chainsafe/blst": "^2.2.0", "@chainsafe/hashtree": "^1.0.2", "@types/bun": "latest", 
"bigint-buffer": "^1.1.5" diff --git a/src/binding.ts b/src/binding.ts index 71e6c7f..9203396 100644 --- a/src/binding.ts +++ b/src/binding.ts @@ -397,6 +397,213 @@ const fns = { ], "returns": "i32" }, + "secretKeyFromBytes": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "secretKeyToBytes": { + "args": [ + "ptr", + "ptr" + ], + "returns": "void" + }, + "secretKeyKeyGen": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "secretKeyKeyGenV3": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "secretKeyKeyGenV45": { + "args": [ + "ptr", + "ptr", + "u32", + "ptr", + "u32" + ], + "returns": "i32" + }, + "secretKeyDeriveMasterEip2333": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "secretKeyDeriveChildEip2333": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "secretKeyToPublicKey": { + "args": [ + "ptr", + "ptr" + ], + "returns": "void" + }, + "secretKeySign": { + "args": [ + "ptr", + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "publicKeyFromBytes": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "publicKeyToBytes": { + "args": [ + "ptr", + "ptr" + ], + "returns": "void" + }, + "publicKeyValidate": { + "args": [ + "ptr" + ], + "returns": "i32" + }, + "aggregateWithRandomness": { + "args": [ + "ptr", + "ptr", + "u32", + "ptr", + "ptr", + "bool", + "bool" + ], + "returns": "i32" + }, + "publicKeyAggregateWithRandomness": { + "args": [ + "ptr", + "ptr", + "u32", + "bool" + ], + "returns": "i32" + }, + "publicKeyAggregate": { + "args": [ + "ptr", + "ptr", + "u32", + "bool" + ], + "returns": "i32" + }, + "signatureFromBytes": { + "args": [ + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "signatureToBytes": { + "args": [ + "ptr", + "ptr" + ], + "returns": "void" + }, + "signatureValidate": { + "args": [ + "ptr", + "bool" + ], + "returns": "i32" + }, + "signatureVerify": { + "args": [ + "ptr", + "bool", + "ptr", + "u32", + 
"ptr", + "bool" + ], + "returns": "i32" + }, + "signatureAggregateVerify": { + "args": [ + "ptr", + "bool", + "ptr", + "ptr", + "u32", + "bool" + ], + "returns": "i32" + }, + "signatureFastAggregateVerify": { + "args": [ + "ptr", + "bool", + "ptr", + "ptr", + "u32" + ], + "returns": "i32" + }, + "signatureVerifyMultipleAggregateSignatures": { + "args": [ + "u32", + "ptr", + "ptr", + "bool", + "ptr", + "bool" + ], + "returns": "i32" + }, + "signatureAggregateWithRandomness": { + "args": [ + "ptr", + "ptr", + "u32", + "bool" + ], + "returns": "i32" + }, + "signatureAggregate": { + "args": [ + "ptr", + "ptr", + "u32", + "bool" + ], + "returns": "i32" + }, "err_name": { "args": [ "u16" diff --git a/src/blst.ts b/src/blst.ts new file mode 100644 index 0000000..d9739d0 --- /dev/null +++ b/src/blst.ts @@ -0,0 +1,7 @@ +export * from "./blst/publicKey.ts"; +export * from "./blst/secretKey.ts"; +export * from "./blst/signature.ts"; +export * from "./blst/aggregateWithRandomness.ts"; +export * from "./blst/verifyMultipleAggregateSignatures.ts"; +export * from "./blst/aggregate.ts"; +export * from "./blst/const.ts"; diff --git a/src/blst/aggregate.ts b/src/blst/aggregate.ts new file mode 100644 index 0000000..078c7f4 --- /dev/null +++ b/src/blst/aggregate.ts @@ -0,0 +1,138 @@ +import {binding} from "../binding.js"; +import {pksU8, sigsU8, writePublicKeys, writeSignatures} from "./buffer.ts"; +import {MAX_AGGREGATE_PER_JOB, PUBLIC_KEY_SIZE, SIGNATURE_LENGTH} from "./const.js"; +import {PublicKey} from "./publicKey.js"; +import {Signature} from "./signature.js"; +import {writePublicKeysReference, writeSignaturesReference, writeUint8ArrayArray} from "./writers.ts"; + +// global public keys reference to be reused across multiple calls +// each 2 items are 8 bytes, store the reference of each public key +const publicKeysRef = new Uint32Array(MAX_AGGREGATE_PER_JOB * 2); +const signaturesRef = new Uint32Array(MAX_AGGREGATE_PER_JOB * 2); + +/** + * Aggregate multiple public keys into 
a single public key.
+ *
+ * If `pks_validate` is `true`, the public keys will be infinity and group checked.
+ */
+export function aggregatePublicKeys(pks: Array<PublicKey>, pksValidate?: boolean | undefined | null): PublicKey {
+  if (pks.length === 0) {
+    throw new Error("At least one public key is required");
+  }
+
+  const resultPks: PublicKey[] = [];
+
+  // Aggregate in batches bounded by MAX_AGGREGATE_PER_JOB, then reduce recursively.
+  for (let i = 0; i < pks.length; i += MAX_AGGREGATE_PER_JOB) {
+    const pksBatch = pks.slice(i, Math.min(pks.length, i + MAX_AGGREGATE_PER_JOB));
+    writePublicKeys(pksBatch);
+    const outPk = new PublicKey(new Uint8Array(PUBLIC_KEY_SIZE));
+    const res = binding.publicKeyAggregate(outPk.ptr, pksU8, pksBatch.length, pksValidate ?? false);
+
+    if (res !== 0) {
+      // NOTE(review): `throwErr` is not imported in this module; throw directly like the other helpers here.
+      throw new Error(`Failed to aggregate public keys: ${res}`);
+    }
+    resultPks.push(outPk);
+  }
+
+  return resultPks.length === 1 ? resultPks[0] : aggregatePublicKeys(resultPks, pksValidate);
+}
+
+/**
+ * Aggregate multiple signatures into a single signature.
+ *
+ * If `sigs_groupcheck` is `true`, the signatures will be group checked.
+ */
+export function aggregateSignatures(sigs: Array<Signature>, sigsGroupcheck?: boolean | undefined | null): Signature {
+  const resultSig: Signature[] = [];
+
+  if (sigs.length === 0) {
+    throw new Error("At least one signature is required");
+  }
+
+  // Aggregate in batches bounded by MAX_AGGREGATE_PER_JOB, then reduce recursively.
+  for (let i = 0; i < sigs.length; i += MAX_AGGREGATE_PER_JOB) {
+    const sigsBatch = sigs.slice(i, Math.min(sigs.length, i + MAX_AGGREGATE_PER_JOB));
+    writeSignatures(sigsBatch);
+    const outSig = new Signature(new Uint8Array(SIGNATURE_LENGTH));
+    const res = binding.signatureAggregate(outSig.ptr, sigsU8, sigsBatch.length, sigsGroupcheck ?? false);
+
+    if (res !== 0) {
+      throw new Error(`Failed to aggregate signatures: ${res}`);
+    }
+    resultSig.push(outSig);
+  }
+
+  return resultSig.length === 1 ? resultSig[0] : aggregateSignatures(resultSig, sigsGroupcheck);
+}
+
+/**
+ * Aggregate multiple serialized public keys into a single public key.
+ * + * If `pks_validate` is `true`, the public keys will be infinity and group checked. + */ +export function aggregateSerializedPublicKeys( + pks: Array, + pksValidate?: boolean | undefined | null +): PublicKey { + if (pks.length < 1) { + throw new Error("At least one public key is required"); + } + + const resultPublicKeys: PublicKey[] = []; + + for (let i = 0; i < pks.length; i += MAX_AGGREGATE_PER_JOB) { + const pksBatch = pks.slice(i, Math.min(pks.length, i + MAX_AGGREGATE_PER_JOB)); + const pksRef = writeSerializedPublicKeysReference(pksBatch); + const outPk = new PublicKey(new Uint8Array(PUBLIC_KEY_SIZE)); + const res = binding.aggregatePublicKeys(outPk.ptr, pksRef, pksBatch.length, pks[0].length, pksValidate ?? false); + + if (res !== 0) { + throw new Error(`Failed to aggregate serialized public keys: ${res}`); + } + resultPublicKeys.push(outPk); + } + + return resultPublicKeys.length === 1 ? resultPublicKeys[0] : aggregatePublicKeys(resultPublicKeys, pksValidate); +} + +/** + * Aggregate multiple serialized signatures into a single signature. + * + * If `sigs_groupcheck` is `true`, the signatures will be group checked. + */ +export function aggregateSerializedSignatures( + sigs: Array, + sigsGroupcheck?: boolean | undefined | null +): Signature { + if (sigs.length < 1) { + throw new Error("At least one signature is required"); + } + + const resultSignatures: Signature[] = []; + + for (let i = 0; i < sigs.length; i += MAX_AGGREGATE_PER_JOB) { + const sigsBatch = sigs.slice(i, Math.min(sigs.length, i + MAX_AGGREGATE_PER_JOB)); + const sigsRef = writeSerializedSignaturesReference(sigsBatch); + const outSig = new Signature(new Uint8Array(SIGNATURE_LENGTH)); + const res = binding.signatureAggregate(outSig.ptr, sigsRef, sigsBatch.length, sigsGroupcheck ?? false); + + if (res !== 0) { + throw new Error(`Failed to aggregate serialized signatures: ${res}`); + } + resultSignatures.push(outSig); + } + + return resultSignatures.length === 1 + ? 
resultSignatures[0] + : aggregateSerializedSignatures(resultSignatures, sigsGroupcheck); +} + +function writeSerializedPublicKeysReference(pks: Uint8Array[]): Uint32Array { + writeUint8ArrayArray(pks, MAX_AGGREGATE_PER_JOB, "public key", publicKeysRef); + return publicKeysRef.subarray(0, pks.length * 2); +} + +function writeSerializedSignaturesReference(sigs: Uint8Array[]): Uint32Array { + writeUint8ArrayArray(sigs, MAX_AGGREGATE_PER_JOB, "signature", signaturesRef); + return signaturesRef.subarray(0, sigs.length * 2); +} diff --git a/src/blst/aggregateWithRandomness.ts b/src/blst/aggregateWithRandomness.ts new file mode 100644 index 0000000..911bccd --- /dev/null +++ b/src/blst/aggregateWithRandomness.ts @@ -0,0 +1,160 @@ +import {JSCallback} from "bun:ffi"; +import {binding} from "../binding.js"; +import {pksU8, writePublicKeys} from "./buffer.ts"; +import {MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB, PUBLIC_KEY_SIZE, SIGNATURE_LENGTH} from "./const.js"; +import {PublicKey} from "./publicKey.js"; +import {Signature} from "./signature.js"; +import {writeNumber, writePublicKeysReference, writeReference, writeSignaturesReference} from "./writers.ts"; + +export interface PkAndSerializedSig { + pk: PublicKey; + sig: Uint8Array; +} + +export interface PkAndSig { + pk: PublicKey; + sig: Signature; +} + +/** + * Aggregate multiple public keys and multiple serialized signatures into a single blinded public key and blinded signature. + * + * Signatures are deserialized and validated with infinity and group checks before aggregation. 
+ * TODO: see if we can support unlimited sets
+ */
+export function aggregateWithRandomness(sets: Array<PkAndSerializedSig>): PkAndSig {
+  if (sets.length > MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB) {
+    throw new Error(`Number of PkAndSerializedSig exceeds the maximum of ${MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB}`);
+  }
+
+  if (sets.length === 0) {
+    throw new Error("At least one PkAndSerializedSig is required");
+  }
+
+  // Signatures are deserialized with group check (second arg `true`) before aggregation.
+  const pksRef = writePublicKeysReference(sets.map((s) => s.pk));
+  const sigsRef = writeSignaturesReference(sets.map((s) => Signature.fromBytes(s.sig, true)));
+  const pkOut = new PublicKey(new Uint8Array(PUBLIC_KEY_SIZE));
+  const sigOut = new Signature(new Uint8Array(SIGNATURE_LENGTH));
+
+  const res = binding.aggregateWithRandomness(pkOut.ptr, sigOut.ptr, sets.length, pksRef, sigsRef, false, false);
+
+  if (res) {
+    // NOTE(review): `throwErr` is not imported in this module, and the old second throw was unreachable after it.
+    throw new Error(`Failed to aggregate with randomness res = ${res}`);
+  }
+
+  return {pk: pkOut, sig: sigOut};
+}
+
+/**
+ * Aggregate multiple public keys and multiple serialized signatures into a single blinded public key and blinded signature.
+ *
+ * Signatures are deserialized and validated with infinity and group checks before aggregation.
+ * TODO: this api only works with MacOS not Linux
+ * got this error on Linux:
+ * ```
+ * thread 1893 panic: reached unreachable code
+ * Panicked during a panic. Aborting.
+ * ```
+ */
+export function asyncAggregateWithRandomness(sets: Array<PkAndSerializedSig>): Promise<PkAndSig> {
+  if (sets.length > MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB) {
+    throw new Error(`Number of PkAndSerializedSig exceeds the maximum of ${MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB}`);
+  }
+
+  if (sets.length === 0) {
+    throw new Error("At least one PkAndSerializedSig is required");
+  }
+
+  // 1s timeout
+  const TIMEOUT_MS = 1_000;
+  const pkOut = new PublicKey(new Uint8Array(PUBLIC_KEY_SIZE));
+  const sigOut = new Signature(new Uint8Array(SIGNATURE_LENGTH));
+
+  return new Promise((resolve, reject) => {
+    let jscallback: JSCallback | null = null;
+    const timeout = setTimeout(() => {
+      if (jscallback) {
+        jscallback.close();
+        jscallback = null;
+      }
+      reject(new Error(`Timeout after ${TIMEOUT_MS}ms`));
+    }, TIMEOUT_MS);
+
+    // it's important to always close the callback
+    jscallback = new JSCallback(
+      (res: number): void => {
+        clearTimeout(timeout);
+        const _res = res;
+        if (jscallback) {
+          jscallback.close();
+          jscallback = null;
+        }
+        // setTimeout to unblock zig callback thread, not sure why "res" can only be accessed once
+        setTimeout(() => {
+          if (_res === 0) {
+            resolve({pk: pkOut, sig: sigOut});
+          } else {
+            reject(new Error("Failed to aggregate with randomness"));
+          }
+        }, 0);
+      },
+      {
+        args: ["u32"],
+        returns: "void",
+        threadsafe: true,
+      }
+    );
+
+    const refs = new Uint32Array(sets.length * 2);
+    writePkAndSerializedSigsReference(sets, refs);
+
+    // NOTE(review): argument order here (refs, len, pkOut, sigOut, cb) does not match the
+    // `aggregateWithRandomness` signature declared in binding.ts — confirm this targets the async binding.
+    const res = binding.aggregateWithRandomness(
+      refs,
+      sets.length,
+      pkOut.ptr,
+      sigOut.ptr,
+      // it's noted in bun:ffi doc that using JSCallback.prototype.ptr is faster than JSCallback object
+      jscallback.ptr
+    );
+
+    if (res !== 0) {
+      clearTimeout(timeout);
+      if (jscallback) {
+        jscallback.close();
+        jscallback = null;
+      }
+      reject(new Error(`Failed to aggregate with randomness res = ${res}`));
+    }
+  });
+}
+
+// global PkAndSerializedSig data to be reused across multiple calls
+// each PkAndSerializedSig are 24 bytes
+const setsData = new
Uint32Array(MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB * 6); +function writePkAndSerializedSigsReference(sets: PkAndSerializedSig[], out: Uint32Array): void { + const offset = 0; + for (const [i, set] of sets.entries()) { + writePkAndSerializedSigReference(set, setsData, offset + i * 6); + // write pointer, each PkAndSerializedSig takes 8 bytes = 2 * uint32 + writeReference(setsData.subarray(i * 6, i * 6 + 6), out, i * 2); + } +} + +// each PkAndSerializedSig needs 16 bytes = 4 * uint32 for references +/** + * Map an instance of PkAndSerializedSig in typescript to this struct in Zig: + * ```zig + * const PkAndSerializedSigC = extern struct { + pk: *pk_aff_type, + sig: [*c]const u8, + sig_len: usize, + }; + * ``` + * + */ +function writePkAndSerializedSigReference(set: PkAndSerializedSig, out: Uint32Array, offset: number): void { + writeReference(set.pk, out, offset); + writeReference(set.sig, out, offset + 2); + writeNumber(set.sig.length, out, offset + 4); +} diff --git a/src/blst/buffer.ts b/src/blst/buffer.ts new file mode 100644 index 0000000..2cf58e8 --- /dev/null +++ b/src/blst/buffer.ts @@ -0,0 +1,56 @@ +import {type Pointer, read} from "bun:ffi"; +import {MAX_AGGREGATE_PER_JOB, PUBLIC_KEY_SIZE, SIGNATURE_LENGTH} from "./const.js"; +import type {PublicKey} from "./publicKey.js"; +import type {Signature} from "./signature.js"; + +/** + * Write a pointer value to a buffer at the specified offset. + * NOTE: Only works with pointers of size divisible by 4. + */ +function writePtr(ptr: Pointer, size: number, buf: Uint32Array, offset: number): void { + for (let i = 0; i < size / 4; i++) { + buf[offset + i] = read.u32(ptr, i * 4); + } +} + +// Operations involving multiple pks require pks in contiguous memory. +// This buffer is (re)used for this purpose. 
+const pksBuffer = new ArrayBuffer(PUBLIC_KEY_SIZE * MAX_AGGREGATE_PER_JOB); +const sigsBuffer = new ArrayBuffer(SIGNATURE_LENGTH * MAX_AGGREGATE_PER_JOB); +export const pksU8 = new Uint8Array(pksBuffer); +const pksU32 = new Uint32Array(pksBuffer); +export const sigsU8 = new Uint8Array(sigsBuffer); + +export function writePublicKeys(pks: PublicKey[]): void { + for (const [i, pk] of pks.entries()) { + writePublicKey(pk, i); + } +} + +function writePublicKey(pk: PublicKey, i: number): void { + if (typeof pk.ptr === "number") { + writePtr(pk.ptr, PUBLIC_KEY_SIZE, pksU32, (i * PUBLIC_KEY_SIZE) / 4); + } else { + pksU8.set(pk.ptr, i * PUBLIC_KEY_SIZE); + } +} + +export function writeSignatures(sigs: Signature[]): void { + for (const [i, sig] of sigs.entries()) { + writeSignature(sig, i); + } +} + +function writeSignature(sig: Signature, i: number): void { + sigsU8.set(sig.ptr, i * SIGNATURE_LENGTH); +} + +// Operations involving multiple msgs require msgs in contiguous memory. +const msgsBuffer = new ArrayBuffer(32 * MAX_AGGREGATE_PER_JOB); +export const msgsU8 = new Uint8Array(msgsBuffer); + +export function writeMessages(msgs: Uint8Array[]): void { + for (const [i, msg] of msgs.entries()) { + msgsU8.set(msg, i * 32); + } +} diff --git a/src/blst/const.ts b/src/blst/const.ts new file mode 100644 index 0000000..c946b36 --- /dev/null +++ b/src/blst/const.ts @@ -0,0 +1,11 @@ +export const SECRET_KEY_SIZE = 32; +export const PUBLIC_KEY_SIZE = 96; +export const PUBLIC_KEY_COMPRESS_SIZE = 48; + +export const SIGNATURE_LENGTH = 192; +export const SIGNATURE_LENGTH_COMPRESSED = 96; + +export const MESSAGE_LENGTH = 32; +export const MAX_SIGNATURE_SETS_PER_JOB = 128; +export const MAX_AGGREGATE_WITH_RANDOMNESS_PER_JOB = 128; +export const MAX_AGGREGATE_PER_JOB = 128; diff --git a/src/blst/publicKey.ts b/src/blst/publicKey.ts new file mode 100644 index 0000000..a212afb --- /dev/null +++ b/src/blst/publicKey.ts @@ -0,0 +1,60 @@ +import type {Pointer} from "bun:ffi"; +import 
{binding} from "../binding.ts"; +import {PUBLIC_KEY_COMPRESS_SIZE, PUBLIC_KEY_SIZE} from "./const.ts"; +import {assertSuccess, fromHex, toHex} from "./util.ts"; + +export class PublicKey { + // this is mapped directly to `*const PublicKey` in Zig + ptr: Uint8Array | Pointer; + + constructor(ptr: Uint8Array | Pointer) { + this.ptr = ptr; + } + + /** + * Deserialize a public key from a byte array. + * + * If `pk_validate` is `true`, the public key will be infinity and group checked. + */ + static fromBytes(bytes: Uint8Array, pkValidate?: boolean | undefined | null): PublicKey { + if (bytes.length !== PUBLIC_KEY_COMPRESS_SIZE) { + throw new Error("Invalid encoding"); + } + + const buffer = new Uint8Array(PUBLIC_KEY_SIZE); + assertSuccess(binding.publicKeyFromBytes(buffer, bytes, bytes.length)); + + if (pkValidate) { + assertSuccess(binding.publicKeyValidate(buffer)); + } + return new PublicKey(buffer); + } + + /** + * Deserialize a public key from a hex string. + * + * If `pk_validate` is `true`, the public key will be infinity and group checked. + */ + static fromHex(hex: string, pkValidate?: boolean | undefined | null): PublicKey { + const bytes = fromHex(hex); + return PublicKey.fromBytes(bytes, pkValidate); + } + + /** Serialize a public key to a byte array. */ + toBytes(): Uint8Array { + const out = new Uint8Array(PUBLIC_KEY_COMPRESS_SIZE); + binding.publicKeyToBytes(out, this.ptr); + return out; + } + + /** Serialize a public key to a hex string. */ + toHex(): string { + const bytes = this.toBytes(); + return toHex(bytes); + } + + /** Validate a public key with infinity and group check. 
*/ + keyValidate(): void { + assertSuccess(binding.publicKeyValidate(this.ptr)); + } +} diff --git a/src/blst/secretKey.ts b/src/blst/secretKey.ts new file mode 100644 index 0000000..5ede0e2 --- /dev/null +++ b/src/blst/secretKey.ts @@ -0,0 +1,96 @@ +import type {Pointer} from "bun:ffi"; +import {binding} from "../binding.js"; +import {PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, SIGNATURE_LENGTH} from "./const.js"; +import {PublicKey} from "./publicKey.js"; +import {Signature} from "./signature.js"; +import {assertSuccess, fromHex, toHex} from "./util.js"; + +export class SecretKey { + private ptr: Uint8Array | Pointer; + + private constructor(ptr: Uint8Array | Pointer) { + this.ptr = ptr; + } + + /** + * Generate a secret key deterministically from a secret byte array `ikm`. + * + * `ikm` must be at least 32 bytes long. + */ + static fromKeygen(ikm: Uint8Array): SecretKey { + const buffer = new Uint8Array(SECRET_KEY_SIZE); + assertSuccess(binding.secretKeyKeyGen(buffer, ikm, ikm.length)); + + return new SecretKey(buffer); + } + + /** + * Generate a master secret key deterministically from a secret byte array `ikm` based on EIP-2333. + * + * `ikm` must be at least 32 bytes long. + * + * See https://eips.ethereum.org/EIPS/eip-2333 + */ + static deriveMasterEip2333(ikm: Uint8Array): SecretKey { + const buffer = new Uint8Array(SECRET_KEY_SIZE); + assertSuccess(binding.secretKeyDeriveMasterEip2333(buffer, ikm, ikm.length)); + + return new SecretKey(buffer); + } + + /** + * Derive a child secret key from a parent secret key based on EIP-2333. + * + * See https://eips.ethereum.org/EIPS/eip-2333 + */ + deriveChildEip2333(index: number): SecretKey { + const buffer = new Uint8Array(SECRET_KEY_SIZE); + binding.secretKeyDeriveChildEip2333(buffer, this.ptr, index); + return new SecretKey(buffer); + } + + /** Deserialize a secret key from a byte array. 
*/ + static fromBytes(bytes: Uint8Array): SecretKey { + const buffer = new Uint8Array(SECRET_KEY_SIZE); + assertSuccess(binding.secretKeyFromBytes(buffer, bytes, bytes.length)); + + return new SecretKey(buffer); + } + + /** Deserialize a secret key from a hex string. */ + static fromHex(hex: string): SecretKey { + const bytes = fromHex(hex); + return SecretKey.fromBytes(bytes); + } + + /** Serialize a secret key to a byte array. */ + toBytes(): Uint8Array { + const bytes = new Uint8Array(SECRET_KEY_SIZE); + binding.secretKeyToBytes(bytes, this.ptr); + return bytes; + } + + /** Serialize a secret key to a hex string. */ + toHex(): string { + const bytes = this.toBytes(); + return toHex(bytes); + } + + /** Return the corresponding public key */ + toPublicKey(): PublicKey { + const pk = new Uint8Array(PUBLIC_KEY_SIZE); + binding.secretKeyToPublicKey(pk, this.ptr); + return new PublicKey(pk); + } + + /** Return the signature */ + sign(msg: Uint8Array): Signature { + if (msg.length === 0) { + throw new Error("Message cannot be empty"); + } + + const sig = new Uint8Array(SIGNATURE_LENGTH); + binding.secretKeySign(sig, this.ptr, msg, msg.length); + return new Signature(sig); + } +} diff --git a/src/blst/signature.ts b/src/blst/signature.ts new file mode 100644 index 0000000..b12304d --- /dev/null +++ b/src/blst/signature.ts @@ -0,0 +1,157 @@ +import type {Pointer} from "bun:ffi"; +import {binding} from "../binding.js"; +import {msgsU8, pksU8, writeMessages, writePublicKeys} from "./buffer.js"; +import {SIGNATURE_LENGTH, SIGNATURE_LENGTH_COMPRESSED} from "./const.js"; +import type {PublicKey} from "./publicKey.js"; +import {assertSuccess, fromHex, toHex} from "./util.js"; + +export class Signature { + // this is mapped directly to `*const Signature` in Zig + ptr: Uint8Array | Pointer; + + constructor(ptr: Uint8Array | Pointer) { + this.ptr = ptr; + } + + /** + * Deserialize a signature from a byte array. 
+   *
+   * If `sig_validate` is `true`, the signature will be infinity and group checked.
+   *
+   * If `sig_infcheck` is `false`, the infinity check will be skipped.
+   */
+  static fromBytes(
+    bytes: Uint8Array,
+    sigValidate?: boolean | undefined | null,
+    sigInfcheck?: boolean | undefined | null
+  ): Signature {
+    const buffer = new Uint8Array(SIGNATURE_LENGTH);
+    const sig = new Signature(buffer);
+
+    assertSuccess(binding.signatureFromBytes(sig.ptr, bytes, bytes.length));
+
+    if (sigValidate) {
+      assertSuccess(binding.signatureValidate(sig.ptr, sigInfcheck ?? true));
+    }
+
+    return sig;
+  }
+
+  /**
+   * Deserialize a signature from a hex string.
+   *
+   * If `sig_validate` is `true`, the signature will be infinity and group checked.
+   *
+   * If `sig_infcheck` is `false`, the infinity check will be skipped.
+   */
+  static fromHex(
+    hex: string,
+    sigValidate?: boolean | undefined | null,
+    sigInfcheck?: boolean | undefined | null
+  ): Signature {
+    const bytes = fromHex(hex);
+    return Signature.fromBytes(bytes, sigValidate, sigInfcheck);
+  }
+
+  /** Serialize a signature to a byte array. */
+  toBytes(): Uint8Array {
+    const out = new Uint8Array(SIGNATURE_LENGTH_COMPRESSED);
+    binding.signatureToBytes(out, this.ptr);
+    return out;
+  }
+
+  /** Serialize a signature to a hex string. */
+  toHex(): string {
+    const bytes = this.toBytes();
+    return toHex(bytes);
+  }
+
+  /**
+   * Validate a signature with infinity and group check.
+   *
+   * If `sig_infcheck` is `false`, the infinity check will be skipped.
+   */
+  sigValidate(sigInfcheck?: boolean | undefined | null): void {
+    assertSuccess(binding.signatureValidate(this.ptr, sigInfcheck ?? true));
+  }
+
+  /**
+   * Verify a signature against a message and public key.
+   *
+   * If `pk_validate` is `true`, the public key will be infinity and group checked.
+   *
+   * If `sig_groupcheck` is `true`, the signature will be group checked.
+ */ + verify( + msg: Uint8Array, + pk: PublicKey, + pkValidate?: boolean | undefined | null, + sigGroupcheck?: boolean | undefined | null + ): boolean { + if (msg.length === 0) { + throw new Error("Message cannot be empty"); + } + + const res = binding.signatureVerify(this.ptr, sigGroupcheck ?? false, msg, msg.length, pk.ptr, pkValidate ?? false); + return res === 0; + } + + /** + * Verify an aggregated signature against a single message and multiple public keys. + * + * Proof-of-possession is required for public keys. + * + * If `sigs_groupcheck` is `true`, the signatures will be group checked. + */ + fastAggregateVerify(msg: Uint8Array, pks: PublicKey[], sigsGroupcheck?: boolean | undefined | null): boolean { + if (msg.length !== 32) { + throw new Error("Message must be 32 bytes long"); + } + + writePublicKeys(pks); + const res = binding.signatureFastAggregateVerify(this.ptr, sigsGroupcheck ?? false, msg, pksU8, pks.length); + return res === 0; + } + + /** + * Verify an aggregated signature against multiple messages and multiple public keys. + * + * If `pk_validate` is `true`, the public keys will be infinity and group checked. + * + * If `sigs_groupcheck` is `true`, the signatures will be group checked. + * + * The down side of zig binding is all messages have to be the same length. + */ + aggregateVerify( + msgs: Array, + pks: Array, + pkValidate?: boolean | undefined | null, + sigsGroupcheck?: boolean | undefined | null + ): boolean { + if (msgs.length < 1) { + // this is the same to the original napi-rs blst-ts + return false; + } + if (msgs.length !== pks.length) { + throw new Error("Number of messages must be equal to the number of public keys"); + } + + for (let i = 0; i < msgs.length; i++) { + if (msgs[i].length !== 32) { + throw new Error("All messages must be 32 bytes long"); + } + } + + writeMessages(msgs); + writePublicKeys(pks); + const res = binding.signatureAggregateVerify( + this.ptr, + sigsGroupcheck ?? 
false, + msgsU8, + pksU8, + pks.length, + pkValidate ?? false + ); + return res === 0; + } +} diff --git a/src/blst/util.ts b/src/blst/util.ts new file mode 100644 index 0000000..0ac9142 --- /dev/null +++ b/src/blst/util.ts @@ -0,0 +1,15 @@ +import {throwErr} from "../common.ts"; + +export function toHex(buffer: Uint8Array): string { + return "0x" + buffer.toHex(); +} + +export function fromHex(hex: string): Uint8Array { + return Uint8Array.fromHex(hex.replace("0x", "")); +} + +export function assertSuccess(blstErrorCode: number): void { + if (blstErrorCode !== 0) { + throw throwErr(blstErrorCode); + } +} diff --git a/src/blst/verifyMultipleAggregateSignatures.ts b/src/blst/verifyMultipleAggregateSignatures.ts new file mode 100644 index 0000000..f1c2b7c --- /dev/null +++ b/src/blst/verifyMultipleAggregateSignatures.ts @@ -0,0 +1,52 @@ +import {binding} from "../binding.js"; +import {msgsU8, writeMessages} from "./buffer.ts"; +import {MAX_SIGNATURE_SETS_PER_JOB} from "./const.js"; +import type {PublicKey} from "./publicKey.js"; +import type {Signature} from "./signature.js"; +import {pairing} from "./util.js"; +import {writePublicKeysReference, writeReference, writeSignaturesReference} from "./writers.ts"; + +export interface SignatureSet { + msg: Uint8Array; + pk: PublicKey; + sig: Signature; +} + +/** + * Verify multiple aggregated signatures against multiple messages and multiple public keys. + * + * If `pks_validate` is `true`, the public keys will be infinity and group checked. + * + * If `sigs_groupcheck` is `true`, the signatures will be group checked. 
+ * + * See https://ethresear.ch/t/fast-verification-of-multiple-bls-signatures/5407 + */ +export function verifyMultipleAggregateSignatures( + sets: SignatureSet[], + pksValidate?: boolean | undefined | null, + sigsGroupcheck?: boolean | undefined | null +): boolean { + if (sets.length > MAX_SIGNATURE_SETS_PER_JOB) { + throw new Error(`Number of signature sets exceeds the maximum of ${MAX_SIGNATURE_SETS_PER_JOB}`); + } + + const msgLength = 32; + for (const set of sets) { + if (set.msg.length !== msgLength) { + throw new Error("All messages must be 32 bytes"); + } + } + const pksRef = writePublicKeysReference(sets.map((s) => s.pk)); + const sigsRef = writeSignaturesReference(sets.map((s) => s.sig)); + writeMessages(sets.map((s) => s.msg)); + + const res = binding.signatureVerifyMultipleAggregateSignatures( + sets.length, + msgsU8, + pksRef, + pksValidate ?? false, + sigsRef, + sigsGroupcheck ?? false + ); + return res === 0; +} diff --git a/src/blst/writers.ts b/src/blst/writers.ts new file mode 100644 index 0000000..ea6fba9 --- /dev/null +++ b/src/blst/writers.ts @@ -0,0 +1,99 @@ +import {type Pointer, ptr, read} from "bun:ffi"; +import type {PublicKey} from "./publicKey.js"; +import type {Signature} from "./signature.js"; + +/** + * Write reference of a data to the provided Uint32Array at offset + * TODO: may accept data + offset and compute pointer from the parent typed array. This will help to avoid `subarray()` calls. 
+ */ +export function writeReference(data: Uint8Array | Uint32Array, out: Uint32Array, offset: number): void { + // 2 items of uint32 means 8 of uint8 + if (offset + 2 > out.length) { + throw new Error("Output buffer must be at least 8 bytes long"); + } + + const pointer = ptr(data); + + writeNumber(pointer, out, offset); +} + +/** + * Write a number to "usize" in Zig, which takes 8 bytes + */ +export function writeNumber(data: number, out: Uint32Array, offset: number): void { + if (offset + 2 > out.length) { + throw new Error("Output buffer must be at least 8 bytes long"); + } + + // TODO: check endianess, this is for little endian + out[offset] = data & 0xffffffff; + out[offset + 1] = Math.floor(data / Math.pow(2, 32)); +} + +/** + * Common util to map Uint8Array[] to `[*c][*c]const u8` in Zig + */ +export function writeUint8ArrayArray(data: Uint8Array[], maxItem: number, tag: string, out: Uint32Array): void { + if (data.length > maxItem) { + throw new Error(`Too many ${tag}s, max is ${maxItem}`); + } + + if (out.length < data.length * 2) { + throw new Error(`Output buffer must be at least double data size. out: ${out.length}, data: ${data.length}`); + } + + const pkLength = data[0].length; + + for (let i = 0; i < data.length; i++) { + if (data[i].length !== pkLength) { + throw new Error(`All ${tag}s must be the same length`); + } + writeReference(data[i], out, i * 2); + } +} +/** + * Write a pointer value to a buffer at the specified offset. + * NOTE: Only works with pointers of size divisible by 4. + */ +export function writePtr(ptr: Pointer, size: number, buf: Uint32Array, offset: number): void { + for (let i = 0; i < size / 4; i++) { + buf[offset + i] = read.u32(ptr, i * 4); + } +} + +const MAX_PKS = 128; +// global public key references to be reused across multiple calls +const publicKeysRefs = new Uint32Array(MAX_PKS * 2); + +/** + * Map PublicKey[] in typescript to [*c]const *PublicKeyType in Zig. 
+ */ +export function writePublicKeysReference(pks: PublicKey[]): Uint32Array { + if (pks.length > MAX_PKS) { + throw new Error(`Too many public keys, max is ${MAX_PKS}`); + } + + for (let i = 0; i < pks.length; i++) { + writeReference(pks[i].ptr, publicKeysRefs, i * 2); + } + + return publicKeysRefs.subarray(0, pks.length * 2); +} + +// global public key references to be reused across multiple calls +const signaturesRefs = new Uint32Array(MAX_PKS * 2); + +/** + * Map Signature[] in typescript to [*c]const *SignatureType in Zig. + */ +export function writeSignaturesReference(sigs: Signature[]): Uint32Array { + if (sigs.length > MAX_PKS) { + throw new Error(`Too many signatures, max is ${MAX_PKS}`); + } + + for (let i = 0; i < sigs.length; i++) { + writeReference(sigs[i].ptr, signaturesRefs, i * 2); + } + + return signaturesRefs.subarray(0, sigs.length * 2); +} diff --git a/src/index.ts b/src/index.ts index 0790591..03e4af5 100644 --- a/src/index.ts +++ b/src/index.ts @@ -3,3 +3,4 @@ export * from "./hashtree.ts"; export * from "./leveldb.ts"; export * from "./lmdb.ts"; export * from "./persistent_merkle_tree.ts"; +export * from "./blst.ts"; diff --git a/test/blst/__fixtures__/index.ts b/test/blst/__fixtures__/index.ts new file mode 100644 index 0000000..79119e4 --- /dev/null +++ b/test/blst/__fixtures__/index.ts @@ -0,0 +1,68 @@ +import {fromHex, getFilledUint8, getTestSet, sullyUint8Array} from "../utils/index.js"; + +export const invalidInputs: [string, any][] = [ + // ["boolean", true], + // ["number", 2], + // ["bigint", BigInt("2")], + // ["symbol", Symbol("foo")], + // ["null", null], + // ["undefined", undefined], + // ["object", {foo: "bar"}], + // ["proxy", new Proxy({foo: "bar"}, {})], + // ["date", new Date("1982-03-24T16:00:00-06:00")], + // [ + // "function", + // () => { + // /* no-op */ + // }, + // ], + // ["NaN", Number.NaN], + // ["promise", Promise.resolve()], + // ["Uint16Array", new Uint16Array()], + // ["Uint32Array", new Uint32Array()], + // 
["Map", new Map()], + // ["Set", new Set()], +]; + +export const KEY_MATERIAL = getFilledUint8(32, "123"); +export const SECRET_KEY_BYTES = Uint8Array.from( + Buffer.from("5620799c63c92bb7912122070f7ebb6ddd53bdf9aa63e7a7bffc177f03d14f68", "hex") +); + +export const validPublicKey = { + keygen: "********************************", // Must be at least 32 bytes + uncompressed: fromHex( + "0ae7e5822ba97ab07877ea318e747499da648b27302414f9d0b9bb7e3646d248be90c9fdaddfdb93485a6e9334f0109301f36856007e1bc875ab1b00dbf47f9ead16c5562d889d8b270002ade81e78d473204fcb51ede8659bce3d95c67903bc" + ), + compressed: fromHex( + "8ae7e5822ba97ab07877ea318e747499da648b27302414f9d0b9bb7e3646d248be90c9fdaddfdb93485a6e9334f01093" + ), +}; +export const badPublicKey = Uint8Array.from( + Buffer.from([ + ...Uint8Array.prototype.slice.call(getTestSet().pk.toBytes(false), 8), + ...Buffer.from("0123456789abcdef", "hex"), + ]) +); + +export const G1_POINT_AT_INFINITY = + "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; + +export const G2_POINT_AT_INFINITY = Buffer.from( + "c000000000000000000000000000000000000000000000000000000000000000" + + "0000000000000000000000000000000000000000000000000000000000000000" + + "0000000000000000000000000000000000000000000000000000000000000000", + "hex" +); + +export const validSignature = { + keygen: "********************************", // Must be at least 32 bytes + uncompressed: fromHex( + "057565542eaa01ef2b910bf0eaba4d98a1e5b8b79cc425db08f8780732d0ea9bc85fc6175f272b2344bb27bc572ebf14022e52689dcedfccf44a00e5bd1aa59db44517217d6b0f21b372169ee761938c28914ddcb9663de54db288e760a8e14f0f465dc9f94edd3ea43442840e4ef6aeb51d1f77e8e5c5a0fadfb46f186f4644899c7cbefd6ead2b138b030b2914b748051cbab5d38fceb8bea84973ac08d1db5436f177dbcb11d9b7bbb39b6dc32047472f573c64be1d28fd848716c2844f88" + ), + compressed: fromHex( + 
"a57565542eaa01ef2b910bf0eaba4d98a1e5b8b79cc425db08f8780732d0ea9bc85fc6175f272b2344bb27bc572ebf14022e52689dcedfccf44a00e5bd1aa59db44517217d6b0f21b372169ee761938c28914ddcb9663de54db288e760a8e14f" + ), +}; + +export const badSignature = sullyUint8Array(getTestSet().sig.toBytes(false)); diff --git a/test/blst/aggregatePublicKeys.test.ts b/test/blst/aggregatePublicKeys.test.ts new file mode 100644 index 0000000..643335c --- /dev/null +++ b/test/blst/aggregatePublicKeys.test.ts @@ -0,0 +1,38 @@ +import {describe, expect, it} from "bun:test"; +import {PublicKey, aggregatePublicKeys} from "../../src/blst.ts"; +import {badPublicKey} from "./__fixtures__/index.js"; +import {isEqualBytes} from "./utils/helpers.js"; +import {getTestSets} from "./utils/testSets.js"; + +describe("Aggregate Public Keys", () => { + const sets = getTestSets(10); + const keys = sets.map(({pk}) => pk); + + describe("aggregatePublicKeys()", () => { + it("should return a PublicKey", () => { + const agg = aggregatePublicKeys(keys); + expect(agg instanceof PublicKey).toBeTrue(); + }); + it("should be able to keyValidate PublicKey", () => { + const agg = aggregatePublicKeys(keys); + expect(agg.keyValidate() === undefined).toBeTrue(); + }); + it("should throw for invalid PublicKey", () => { + try { + aggregatePublicKeys(keys.concat(PublicKey.fromBytes(badPublicKey)), true); + expect.fail("Did not throw error for badPublicKey"); + } catch (e) { + expect( + e.message === "PointNotOnCurve" || + e.message === "PointNotInGroup" || + e.message === "BadEncoding" + ).toBeTrue(); + } + }); + it("should return a key that is not in the keys array", () => { + const agg = aggregatePublicKeys(keys); + const serialized = agg.toBytes(); + expect(keys.find((key) => isEqualBytes(key.toBytes(), serialized)) === undefined).toBeTrue(); + }); + }); +}); diff --git a/test/blst/aggregateWithRandomness.test.ts b/test/blst/aggregateWithRandomness.test.ts new file mode 100644 index 0000000..9b251c9 --- /dev/null +++ 
b/test/blst/aggregateWithRandomness.test.ts @@ -0,0 +1,98 @@ +import {beforeAll, describe, expect, it} from "bun:test"; +import { + PublicKey, + Signature, + aggregatePublicKeys, + aggregateSerializedSignatures, + aggregateWithRandomness, + asyncAggregateWithRandomness, + verifyMultipleAggregateSignatures, +} from "../../src/blst.ts"; +import {G1_POINT_AT_INFINITY, G2_POINT_AT_INFINITY} from "./__fixtures__/index.js"; +import {expectNotEqualHex, getTestSet, getTestSetsSameMessage} from "./utils/index.js"; + +describe("Aggregate With Randomness", () => { + const sameMessageSets = getTestSetsSameMessage(10); + const msg = sameMessageSets.msg; + const sets = sameMessageSets.sets.map((s) => ({ + msg: msg, + pk: s.pk, + sig: s.sig.toBytes(), + })); + const randomSet = getTestSet(20); + const infinityPublicKey = Buffer.from(G1_POINT_AT_INFINITY, "hex"); + + beforeAll(() => { + // make sure sets are valid before starting + expect(() => PublicKey.fromBytes(infinityPublicKey).keyValidate()).toThrow("PkIsInfinity"); + const sig = Signature.fromBytes(sets[0].sig); + expect(sig.verify(msg, sets[0].pk)).toBeTrue(); + + expectNotEqualHex(msg, randomSet.msg); + expect(randomSet.sig.verify(randomSet.msg, randomSet.pk)).toBeTrue(); + expect(verifyMultipleAggregateSignatures([randomSet])).toBeTrue(); + }); + + describe("aggregateWithRandomness()", () => { + it("should not accept an empty array argument", () => { + expect(() => aggregateWithRandomness([])).toThrow("At least one PkAndSerializedSig is required"); + }); + it("should throw for invalid serialized", () => { + expect(() => + aggregateWithRandomness( + sets.concat({ + pk: sets[0].pk, + //TODO: (@matthewkeil) this throws error "Public key is infinity" not signature because there is only one blst error + sig: G2_POINT_AT_INFINITY, + } as any) + ) + ).toThrow(); + }); + it("should return a {pk: PublicKey, sig: Signature} object", () => { + const agg = aggregateWithRandomness(sets); + expect(agg).toBeInstanceOf(Object); + + 
expect(agg.pk).toBeDefined(); + expect(agg.pk).toBeInstanceOf(PublicKey); + expect(() => agg.pk.keyValidate()).not.toThrow(); + + expect(agg.sig).toBeDefined(); + expect(agg.sig).toBeInstanceOf(Signature); + expect(() => agg.sig.sigValidate()).not.toThrow(); + }); + it("should add randomness to aggregated publicKey", () => { + const withoutRandomness = aggregatePublicKeys(sets.map(({pk}) => pk)); + const withRandomness = aggregateWithRandomness(sets).pk; + expectNotEqualHex(withRandomness.toBytes(), withoutRandomness.toBytes()); + }); + it("should add randomness to aggregated signature", () => { + const withoutRandomness = aggregateSerializedSignatures(sets.map(({sig}) => sig)); + const withRandomness = aggregateWithRandomness(sets).sig; + expectNotEqualHex(withRandomness.toBytes(), withoutRandomness.toBytes()); + }); + it("should produce verifiable set", () => { + const {pk, sig} = aggregateWithRandomness(sets); + expect(sig.verify(msg, pk)).toBeTrue(); + }); + it("should not validate for different message", async () => { + const {pk, sig} = aggregateWithRandomness(sets); + expect(sig.verify(randomSet.msg, pk)).toBeFalse(); + }); + it("should not validate included key/sig for different message", async () => { + const {pk, sig} = aggregateWithRandomness([...sets, {pk: randomSet.pk, sig: randomSet.sig.toBytes()}]); + expect(sig.verify(msg, pk)).toBeFalse(); + }); + it("should return different signatures for different sets", () => { + const {pk: pk1, sig: sig1} = aggregateWithRandomness(sets); + const {pk: pk2, sig: sig2} = aggregateWithRandomness([...sets, {pk: randomSet.pk, sig: randomSet.sig.toBytes()}]); + expectNotEqualHex(pk1.toBytes(), pk2.toBytes()); + expectNotEqualHex(sig1.toBytes(), sig2.toBytes()); + }); + it("should return different signatures for different times", () => { + const {pk: pk1, sig: sig1} = aggregateWithRandomness(sets); + const {pk: pk2, sig: sig2} = aggregateWithRandomness(sets); + expectNotEqualHex(pk1.toBytes(), pk2.toBytes()); + 
expectNotEqualHex(sig1.toBytes(), sig2.toBytes()); + }); + }); +}); diff --git a/test/blst/publicKey.test.ts b/test/blst/publicKey.test.ts new file mode 100644 index 0000000..84418ed --- /dev/null +++ b/test/blst/publicKey.test.ts @@ -0,0 +1,71 @@ +import {afterAll, describe, expect, it} from "bun:test"; +import {PublicKey, SecretKey, PUBLIC_KEY_COMPRESS_SIZE, PUBLIC_KEY_SIZE} from "../../src/blst.ts"; +import {G1_POINT_AT_INFINITY, SECRET_KEY_BYTES, invalidInputs, validPublicKey} from "./__fixtures__/index.js"; +import {expectEqualHex, expectNotEqualHex, sullyUint8Array} from "./utils/helpers.js"; + +describe("PublicKey", () => { + it("should exist", () => { + expect(PublicKey).toBeFunction(); + }); + + describe("constructors", () => { + describe("deserialize", () => { + it("should only take 48 or 96 bytes", () => { + expect(() => PublicKey.fromBytes(Buffer.alloc(32, "*"))).toThrow("Invalid encoding"); + }); + it("should take compressed byte arrays", () => { + expectEqualHex(PublicKey.fromBytes(validPublicKey.compressed).toBytes(), validPublicKey.compressed); + }); + + describe("argument validation", () => { + for (const [type, invalid] of invalidInputs) { + it(`should throw on invalid pkBytes type: ${type}`, () => { + expect(() => PublicKey.fromBytes(invalid)).toThrow(); + }); + } + it("should throw incorrect length pkBytes", () => { + expect(() => PublicKey.fromBytes(Buffer.alloc(12, "*"))).toThrow("Invalid encoding"); + }); + }); + it("should throw on invalid key", () => { + try { + PublicKey.fromBytes(sullyUint8Array(validPublicKey.compressed), true); + throw new Error("Did not throw error for badPublicKey"); + } catch (e) { + expect( + e.message === "PointNotOnCurve" || e.message === "BadEncoding" + ).toBeTrue(); + } + }); + it("should throw on zero key", () => { + expect(() => PublicKey.fromBytes(Buffer.from(G1_POINT_AT_INFINITY))).toThrow("Invalid encoding"); + }); + }); + }); + + describe("methods", () => { + describe("toBytes", () => { + const sk = 
SecretKey.fromBytes(SECRET_KEY_BYTES); + const pk = sk.toPublicKey(); + it("should toBytes the key to Uint8Array", () => { + expect(pk.toBytes()).toBeInstanceOf(Uint8Array); + }); + it("should serialize uncompressed to the correct length", () => { + expect(pk.toBytes()).toHaveLength(PUBLIC_KEY_COMPRESS_SIZE); + }); + }); + describe("toHex", () => { + it("should toHex string correctly", () => { + const key = PublicKey.fromBytes(validPublicKey.compressed); + expectEqualHex(key.toHex(true), validPublicKey.compressed); + }); + }); + describe("keyValidate()", () => { + it("should not throw on valid public key", () => { + const pk = PublicKey.fromBytes(validPublicKey.compressed, true); + expect(pk.keyValidate()).toBeUndefined(); + }); + }); + }); +}); + diff --git a/test/blst/secretKey.test.ts b/test/blst/secretKey.test.ts new file mode 100644 index 0000000..221f3b1 --- /dev/null +++ b/test/blst/secretKey.test.ts @@ -0,0 +1,94 @@ +import {afterAll, beforeEach, describe, expect, it} from "bun:test"; +import {SecretKey, Signature, PublicKey, SECRET_KEY_SIZE} from "../../src/blst.ts"; +import {KEY_MATERIAL, SECRET_KEY_BYTES, invalidInputs} from "./__fixtures__/index.js"; +import {expectEqualHex, expectNotEqualHex} from "./utils/helpers.js"; + +describe("SecretKey", () => { + it("should exist", () => { + expect(SecretKey).toBeFunction(); + }); + + describe("constructors", () => { + describe("SecretKey.fromKeygen", () => { + it("should create an instance from Uint8Array ikm", () => { + expect(SecretKey.fromKeygen(KEY_MATERIAL)).toBeInstanceOf(SecretKey); + }); + it("should create the same key from the same ikm", () => { + expectEqualHex(SecretKey.fromKeygen(KEY_MATERIAL).toBytes(), SecretKey.fromKeygen(KEY_MATERIAL).toBytes()); + }); + + describe("argument validation", () => { + const validInfoTypes = ["undefined", "null", "string"]; + for (const [type, invalid] of invalidInputs) { + it(`should throw on invalid ikm type: ${type}`, () => { + expect(() => 
SecretKey.fromKeygen(invalid)).toThrow(); + }); + if (!validInfoTypes.includes(type)) { + it(`should throw on invalid info type: ${type}`, () => { + expect(() => SecretKey.fromKeygen(KEY_MATERIAL, invalid)).toThrow(); + }); + } + } + }); + + describe("SecretKey.fromBytes", () => { + it("should create an instance", () => { + expect(SecretKey.fromBytes(SECRET_KEY_BYTES)).toBeInstanceOf(SecretKey); + }); + describe("argument validation", () => { + for (const [type, invalid] of invalidInputs) { + it(`should throw on invalid ikm type: ${type}`, () => { + expect(() => SecretKey.fromBytes(invalid)).toThrow(); + }); + } + }); + }); + }); + }); + + describe("instance methods", () => { + let key: SecretKey; + beforeEach(() => { + key = SecretKey.fromKeygen(KEY_MATERIAL); + }); + describe("toBytes", () => { + it("should toBytes the key to Uint8Array", () => { + expect(key.toBytes()).toBeInstanceOf(Uint8Array); + }); + it("should be the correct length", () => { + expect(key.toBytes().length).toEqual(SECRET_KEY_SIZE); + }); + it("should reconstruct the same key", () => { + const serialized = key.toBytes(); + expectEqualHex(SecretKey.fromBytes(serialized).toBytes(), serialized); + }); + }); + describe("toHex", () => { + it("should toHex string correctly", () => { + const key = SecretKey.fromBytes(SECRET_KEY_BYTES); + expectEqualHex(key.toHex(), SECRET_KEY_BYTES); + }); + }); + describe("toPublicKey", () => { + it("should create a valid PublicKey", () => { + const pk = key.toPublicKey(); + expect(pk).toBeInstanceOf(PublicKey); + expect(pk.keyValidate()).toBeUndefined(); + }); + it("should return the same PublicKey from the same SecretKey", () => { + const sk = SecretKey.fromBytes(SECRET_KEY_BYTES); + const pk1 = sk.toPublicKey().toBytes(); + const pk2 = sk.toPublicKey().toBytes(); + expectEqualHex(pk1, pk2); + }); + }); + describe("sign", () => { + it("should create a valid Signature", () => { + const sig = SecretKey.fromKeygen(KEY_MATERIAL).sign(Buffer.from("some fancy 
message")); + expect(sig).toBeInstanceOf(Signature); + expect(sig.sigValidate()).toBeUndefined(); + }); + }); + }); +}); + diff --git a/test/blst/signature.test.ts b/test/blst/signature.test.ts new file mode 100644 index 0000000..0fac6ed --- /dev/null +++ b/test/blst/signature.test.ts @@ -0,0 +1,60 @@ +import {afterAll, describe, expect, it} from "bun:test"; +import {SecretKey, Signature, SIGNATURE_LENGTH_COMPRESSED, SIGNATURE_LENGTH_UNCOMPRESSED} from "../../src/blst.ts"; +import {KEY_MATERIAL, invalidInputs, validSignature} from "./__fixtures__/index.js"; +import {expectEqualHex, expectNotEqualHex, sullyUint8Array} from "./utils/helpers.js"; + +describe("Signature", () => { + it("should exist", () => { + expect(Signature).toBeFunction(); + }); + describe("constructor", () => { + describe("Signature.fromBytes()", () => { + it("should take compressed byte arrays", () => { + expectEqualHex(Signature.fromBytes(validSignature.compressed).toBytes(), validSignature.compressed); + }); + describe("argument validation", () => { + // for (const [type, invalid] of invalidInputs) { + // it(`should throw on invalid pkBytes type: ${type}`, () => { + // expect(() => Signature.fromBytes(invalid)).toThrow(); + // }); + // } + it("should only take 96 or 192 bytes", () => { + expect(() => Signature.fromBytes(Buffer.alloc(32, "*"))).toThrow("BadEncoding"); + }); + }); + it("should throw on invalid key", () => { + expect(() => Signature.fromBytes(sullyUint8Array(validSignature.compressed))).toThrow("BadEncoding"); + }); + }); + }); + + describe("methods", () => { + describe("toBytes", () => { + const sig = SecretKey.fromKeygen(KEY_MATERIAL).sign(Buffer.from("some fancy message")); + it("should toBytes the signature to Uint8Array", () => { + expect(sig.toBytes()).toBeInstanceOf(Uint8Array); + }); + it("should serialize compressed to the correct length", () => { + expect(sig.toBytes()).toHaveLength(SIGNATURE_LENGTH_COMPRESSED); + }); + }); + describe("toHex", () => { + it("should toHex 
string correctly", () => { + const key = Signature.fromBytes(validSignature.compressed); + expectEqualHex(key.toHex(true), validSignature.compressed); + }); + }); + describe("sigValidate()", () => { + it("should return undefined for valid", () => { + const sig = Signature.fromBytes(validSignature.compressed); + expect(sig.sigValidate()).toBeUndefined(); + }); + it("should throw for invalid", () => { + const pkSeed = Signature.fromBytes(validSignature.compressed); + const sig = Signature.fromBytes(Uint8Array.from([...pkSeed.toBytes().subarray(0, 94), ...Buffer.from("a1")])); + expect(() => sig.sigValidate()).toThrow("PointNotInGroup"); + }); + }); + }); +}); + diff --git a/test/blst/utils/helpers.ts b/test/blst/utils/helpers.ts new file mode 100644 index 0000000..98bef3e --- /dev/null +++ b/test/blst/utils/helpers.ts @@ -0,0 +1,119 @@ +import {expect, it} from "bun:test"; + +type BufferLike = string | Uint8Array | Buffer | bindings.PublicKey | bindings.Signature; + +/** + * Enforce tests for all instance methods + */ +type InstanceTestCases = { + [P in keyof Omit]: { + id?: string; + instance?: InstanceType; + args: Parameters; + res?: ReturnType; + }[]; +}; + +function toHexString(bytes: BufferLike): string { + if (typeof bytes === "string") return bytes; + if (bytes instanceof Buffer) return bytes.toString("hex"); + if (bytes instanceof Uint8Array) return Buffer.from(bytes).toString("hex"); + throw Error("toHexString only accepts BufferLike types"); +} + +export function toHex(bytes: BufferLike): string { + const hex = toHexString(bytes); + if (hex.startsWith("0x")) return hex; + return "0x" + hex; +} + +export function fromHex(hexString: string): Buffer { + if (hexString.startsWith("0x")) hexString = hexString.slice(2); + return Buffer.from(hexString, "hex"); +} + +export function isEqualBytes(value: BufferLike, expected: BufferLike): boolean { + return toHex(value) === toHex(expected); +} + +export function expectEqualHex(value: BufferLike, expected: 
BufferLike): void { + expect(toHex(value)).toBe(toHex(expected)); +} + +export function expectNotEqualHex(value: BufferLike, expected: BufferLike): void { + expect(toHex(value)).not.toBe(toHex(expected)); +} + +export function getFilledUint8(length: number, fillWith: string | number | Buffer = "*"): Uint8Array { + return Uint8Array.from(Buffer.alloc(length, fillWith)); +} + +export function sullyUint8Array(bytes: Uint8Array): Uint8Array { + return Uint8Array.from( + Buffer.from([...Uint8Array.prototype.slice.call(bytes, 8), ...Buffer.from("0123456789abcdef", "hex")]) + ); +} + +export function arrayOfIndexes(start: number, end: number): number[] { + const arr: number[] = []; + for (let i = start; i <= end; i++) arr.push(i); + return arr; +} + +export function shuffle(array: T[]): T[] { + let currentIndex = array.length, + randomIndex; + + while (currentIndex !== 0) { + randomIndex = Math.floor(Math.random() * currentIndex); + currentIndex--; + + [array[currentIndex], array[randomIndex]] = [array[randomIndex], array[currentIndex]]; + } + + return array; +} + +export function chunkifyMaximizeChunkSize(arr: T[], minPerChunk: number): T[][] { + const chunkCount = Math.floor(arr.length / minPerChunk); + if (chunkCount <= 1) { + return [arr]; + } + + // Prefer less chunks of bigger size + const perChunk = Math.ceil(arr.length / chunkCount); + const arrArr: T[][] = []; + + for (let i = 0; i < arr.length; i += perChunk) { + arrArr.push(arr.slice(i, i + perChunk)); + } + + return arrArr; +} + +/** + * Enforce tests for all instance methods and run them + */ +export function runInstanceTestCases( + instanceTestCases: InstanceTestCases, + getInstance: () => InstanceType +): void { + for (const [key, testCases] of Object.entries(instanceTestCases)) { + const methodKey = key as keyof InstanceType; + for (const testCase of testCases) { + it(`${String(methodKey)}: ${testCase.id || ""}`, () => { + // Get a new fresh instance for this test + const instance = testCase.instance || 
getInstance(); + if (typeof instance[methodKey] !== "function") throw Error(`Method ${String(methodKey)} does not exist`); + const res = (instance[methodKey] as (...args: any) => any)(...testCase.args); + if (!res) { + // OK + } else if (res.serialize || res instanceof Uint8Array) { + expectEqualHex(res, testCase.res); + } else { + expect(res).toEqual(testCase.res); + } + }); + } + } +} diff --git a/test/blst/utils/index.ts b/test/blst/utils/index.ts new file mode 100644 index 0000000..e57319a --- /dev/null +++ b/test/blst/utils/index.ts @@ -0,0 +1,2 @@ +export * from "./helpers.js"; +export * from "./testSets.js"; diff --git a/test/blst/utils/testSets.ts b/test/blst/utils/testSets.ts new file mode 100644 index 0000000..f78837c --- /dev/null +++ b/test/blst/utils/testSets.ts @@ -0,0 +1,110 @@ +import crypto from "node:crypto"; +import {SECRET_KEY_SIZE, SecretKey, type Signature} from "../../../src/blst.ts"; +import {arrayOfIndexes} from "./helpers.js"; + +interface TestSet { + msg: Uint8Array; + sk: bindings.SecretKey; + pk: bindings.PublicKey; + sig: bindings.Signature; +} + +interface SameMessageTestSets { + msg: Uint8Array; + sets: { + sk: bindings.SecretKey; + pk: bindings.PublicKey; + sig: bindings.Signature; + }[]; +} + +type SerializedSet = Record; + +const DEFAULT_TEST_MESSAGE = Uint8Array.from(Buffer.from("test-message")); + +export function buildTestSetFromMessage(msg: Uint8Array = DEFAULT_TEST_MESSAGE): TestSet { + const sk = SecretKey.fromKeygen(crypto.randomBytes(SECRET_KEY_SIZE)); + const pk = sk.toPublicKey(); + const sig = sk.sign(msg); + try { + pk.keyValidate(); + } catch { + console.log(">>>\n>>>\n>>> Invalid Key Found in a TestSet\n>>>\n>>>"); + return buildTestSetFromMessage(msg); + } + try { + sig.sigValidate(); + } catch { + console.log(">>>\n>>>\n>>> Invalid Signature Found in a TestSet\n>>>\n>>>"); + return buildTestSetFromMessage(msg); + } + return { + msg, + sk, + pk, + sig, + }; +} + +const testSets = new Map(); +function buildTestSet(i: 
number): TestSet { + const message = crypto.randomBytes(32); + const set = buildTestSetFromMessage(message); + testSets.set(i, set); + return set; +} + +export function getTestSet(i = 0): TestSet { + const set = testSets.get(i); + if (set) { + return set; + } + return buildTestSet(i); +} + +export function getTestSets(count: number): TestSet[] { + return arrayOfIndexes(0, count - 1).map(getTestSet); +} + +export const commonMessage = crypto.randomBytes(32); + +const commonMessageSignatures = new Map(); +export function getTestSetSameMessage(i = 1): TestSet { + const set = getTestSet(i); + let sig = commonMessageSignatures.get(i); + if (!sig) { + sig = set.sk.sign(commonMessage); + commonMessageSignatures.set(i, sig); + } + return { + msg: commonMessage, + sk: set.sk, + pk: set.pk, + sig, + }; +} + +export function getTestSetsSameMessage(count: number): SameMessageTestSets { + const sets = arrayOfIndexes(0, count - 1).map(getTestSetSameMessage); + return { + msg: sets[0].msg, + sets: sets.map(({sk, pk, sig}) => ({sk, pk, sig})), + }; +} + +const serializedSets = new Map(); +export function getSerializedTestSet(i = 1): SerializedSet { + const set = serializedSets.get(i); + if (set) { + return set; + } + const deserialized = getTestSet(i); + const serialized = { + msg: deserialized.msg, + sk: deserialized.sk.toBytes(), + pk: deserialized.pk.toBytes(), + sig: deserialized.sig.toBytes(), + }; + serializedSets.set(i, serialized); + return serialized; +} diff --git a/test/blst/verify.test.ts b/test/blst/verify.test.ts new file mode 100644 index 0000000..fbceb80 --- /dev/null +++ b/test/blst/verify.test.ts @@ -0,0 +1,70 @@ +import {afterAll, beforeAll, describe, expect, it} from "bun:test"; + +import {sullyUint8Array} from "./utils/helpers.js"; +import {getTestSet} from "./utils/testSets.js"; +import type {TestSet} from "./utils/types.js"; + +describe("Verify", () => { + let testSet: TestSet; + beforeAll(() => { + testSet = getTestSet(); + }); + + describe("verify", () => 
{ + it("should return a boolean", () => { + expect(testSet.sig.verify(testSet.msg, testSet.pk)).toBeBoolean(); + }); + describe("should default to false", () => { + it("should handle invalid message", () => { + expect(testSet.sig.verify(sullyUint8Array(testSet.msg), testSet.pk)).toBeFalse(); + }); + }); + it("should return true for valid sets", () => { + expect(testSet.sig.verify(testSet.msg, testSet.pk)).toBeTrue(); + }); + }); +}); + +describe("Aggregate Verify", () => { + let testSet: TestSet; + beforeAll(() => { + testSet = getTestSet(); + }); + describe("aggregateVerify", () => { + it("should return a boolean", () => { + expect(testSet.sig.aggregateVerify([testSet.msg], [testSet.pk])).toBeBoolean(); + }); + describe("should default to false", () => { + it("should handle invalid message", () => { + expect(testSet.sig.aggregateVerify([sullyUint8Array(testSet.msg)], [testSet.pk])).toBeFalse(); + }); + }); + it("should return true for valid sets", () => { + expect(testSet.sig.aggregateVerify([testSet.msg], [testSet.pk])).toBeTrue(); + }); + }); +}); + +describe("Fast Aggregate Verify", () => { + let testSet: TestSet; + beforeAll(() => { + testSet = getTestSet(); + }); + describe("fastAggregateVerify", () => { + it("should return a boolean", () => { + expect(testSet.sig.fastAggregateVerify(testSet.msg, [testSet.pk])).toBeBoolean(); + }); + describe("should default to false", () => { + it("should handle invalid message", () => { + const res = testSet.sig.fastAggregateVerify(sullyUint8Array(testSet.msg), [testSet.pk]); + console.log(res); + + expect(res).toBeFalse(); + }); + }); + it("should return true for valid sets", () => { + expect(testSet.sig.fastAggregateVerify(testSet.msg, [testSet.pk])).toBeTrue(); + }); + }); +}); + diff --git a/test/blst/verifyMultipleAggregateSignatures.test.ts b/test/blst/verifyMultipleAggregateSignatures.test.ts new file mode 100644 index 0000000..43fabe9 --- /dev/null +++ b/test/blst/verifyMultipleAggregateSignatures.test.ts @@ -0,0 
+1,25 @@ +import {afterAll, describe, expect, it} from "bun:test"; +import {verifyMultipleAggregateSignatures} from "../../src/blst.ts"; +import {getTestSet, getTestSets} from "./utils/testSets.js"; + +describe("Verify Multiple Aggregate Signatures", () => { + describe("verifyMultipleAggregateSignatures", () => { + it("should return a boolean", () => { + expect(verifyMultipleAggregateSignatures([])).toBeBoolean(); + }); + it("should default to false", () => { + expect(verifyMultipleAggregateSignatures([])).toBeFalse(); + }); + it("should return true for valid sets", () => { + expect(verifyMultipleAggregateSignatures(getTestSets(6))).toBeTrue(); + }); + it("should return false for invalid sets", () => { + const sets = getTestSets(6); + const randomSet = getTestSet(20); + // do not modify sets[0].sig directly, it will affect other tests + sets[0] = {...sets[0], sig: randomSet.sig}; + expect(verifyMultipleAggregateSignatures(sets)).toBeFalse(); + }); + }); +}); + diff --git a/zbuild.zon b/zbuild.zon index 3246ce6..446f710 100644 --- a/zbuild.zon +++ b/zbuild.zon @@ -2,6 +2,9 @@ .name = .lodestar_z_bun, .version = "0.1.0", .dependencies = .{ + .blst = .{ + .url = "git+https://github.com/Chainsafe/blst-z#ea0ae943083ba24b6a3d60051f971d28d7ed560f", + }, .hashtree = .{ .url = "git+https://github.com/chainsafe/hashtree-z#43a58b0fd4813515cda3d0ffc622125243a01c54", }, @@ -30,6 +33,7 @@ .hashtree, .lmdb, .leveldb, + .blst, "ssz:persistent_merkle_tree", }, .pic = true, diff --git a/zig/blst.zig b/zig/blst.zig new file mode 100644 index 0000000..dc30500 --- /dev/null +++ b/zig/blst.zig @@ -0,0 +1,402 @@ +//! C-ABI of the Zig native `blst` bindings. +//! +//! The Zig native types are represented here as their C-ABI equivalent types for bun interoperability. +//! +//! - `SecretKey`: `c.blst_scalar` +//! - `PublicKey`: `c.blst_p1_affine` +//! - `Signature`: `c.blst_p2_affine` +//! - `AggregatePublicKey`: `c.blst_p1` +//! - `AggregateSignature`: `c.blst_p2` +//! +//! 
We do not define Zig native types for these raw C types in order to not obfuscate the underlying type. + +/// Size of the scratch buffer for pairing operations. +pub const SCRATCH_SIZE_PAIRING: usize = blst.Pairing.sizeOf(); + +/// Scratch buffer used for pairing operations that require temporary storage. +threadlocal var scratch_pairing: [SCRATCH_SIZE_PAIRING]u8 = undefined; + +/// Size of the scratch buffer for aggregation operations. +pub const SCRATCH_SIZE_AGG: usize = 1024 * 16; + +/// Scratch buffer used for aggregation operations that require temporary storage. +threadlocal var scratch_agg: [SCRATCH_SIZE_AGG]u64 = undefined; + +////// SecretKey + +/// Deserialize `SecretKey` represented as `c.blst_scalar` from bytes. +/// +/// Returns 0 on success, error code on failure. +pub export fn secretKeyFromBytes(out: *c.blst_scalar, bytes: [*c]const u8, len: c_uint) i32 { + const deser = SecretKey.deserialize(@ptrCast(bytes[0..len])) catch |e| return toErrCode(e); + out.* = deser.value; + return 0; +} + +/// Serialize a `SecretKey` represented as `c.blst_scalar` to bytes. +pub export fn secretKeyToBytes(out: [*c]u8, sk_raw: *const c.blst_scalar) void { + const sk: *const SecretKey = @ptrCast(sk_raw); + out[0..SecretKey.serialize_size].* = sk.serialize(); +} + +/// Generate a `SecretKey` represented as `c.blst_scalar` from input key material using HKDF. +/// +/// Returns 0 on success, error code on failure. +pub export fn secretKeyKeyGen(out: *c.blst_scalar, ikm: [*c]const u8, ikm_len: c_uint) i32 { + out.* = (SecretKey.keyGen(ikm[0..ikm_len], null) catch |e| return toErrCode(e)).value; + return 0; +} + +/// Generate a `SecretKey` represented as `c.blst_scalar` from input key material using HKDF (version 3). +/// +/// Returns 0 on success, error code on failure. 
+pub export fn secretKeyKeyGenV3(out: *c.blst_scalar, ikm: [*c]const u8, ikm_len: c_uint) i32 { + out.* = (SecretKey.keyGenV3(ikm[0..ikm_len], null) catch |e| return toErrCode(e)).value; + return 0; +} + +/// Generate a `SecretKey` represented as `c.blst_scalar` from input key material using HKDF (version 4.5). +/// +/// Returns 0 on success, error code on failure. +pub export fn secretKeyKeyGenV45( + out: *c.blst_scalar, + ikm: [*c]const u8, + ikm_len: c_uint, + salt: [*c]const u8, + salt_len: c_uint, +) i32 { + out.* = (SecretKey.keyGenV45( + ikm[0..ikm_len], + salt[0..salt_len], + null, + ) catch |e| return toErrCode(e)).value; + return 0; +} + +/// Derive a master `SecretKey` represented as `c.blst_scalar` using EIP-2333 key derivation. +/// +/// Returns 0 on success, error code on failure. +pub export fn secretKeyDeriveMasterEip2333(out: *c.blst_scalar, ikm: [*c]const u8, ikm_len: c_uint) i32 { + out.* = (SecretKey.deriveMasterEip2333(ikm[0..ikm_len]) catch |e| return toErrCode(e)).value; + return 0; +} + +/// Derive a child `SecretKey` represented as `c.blst_scalar` using EIP-2333 key derivation. +/// +/// Returns 0 on success, error code on failure. +pub export fn secretKeyDeriveChildEip2333(out: *c.blst_scalar, sk_raw: *const c.blst_scalar, index: c_uint) i32 { + const sk: *const SecretKey = @ptrCast(sk_raw); + out.* = (sk.deriveChildEip2333(index) catch |e| return toErrCode(e)).value; + return 0; +} + +/// Derive a `PublicKey` represented as `c.blst_p1_affine` from a `SecretKey` represented as `c.blst_scalar`. +pub export fn secretKeyToPublicKey(out: *c.blst_p1_affine, sk_raw: *const c.blst_scalar) void { + const sk: *const SecretKey = @ptrCast(sk_raw); + out.* = sk.toPublicKey().point; +} + +/// Sign a message with `SecretKey` represented as `c.blst_scalar`. and produces a `Signature` represented as `c.blst_p2_affine` in `out`. +/// +/// Returns 0 on success, error code on failure. 
+pub export fn secretKeySign(out: *c.blst_p2_affine, sk_raw: *const c.blst_scalar, msg: [*c]const u8, msg_len: c_uint) i32 { + const sk: *const SecretKey = @ptrCast(sk_raw); + out.* = sk.sign(msg[0..msg_len], DST, null).point; + return 0; +} + +////// PublicKey + +/// Deserialize a `PublicKey` represented as a `c.blst_p1_affine` in `out` from compressed bytes. +/// +/// Returns 0 on success, error code on failure. +pub export fn publicKeyFromBytes(out: *c.blst_p1_affine, bytes: [*c]const u8, len: c_uint) i32 { + out.* = (PublicKey.uncompress(bytes[0..len]) catch |e| return toErrCode(e)).point; + return 0; +} + +/// Serialize a `PublicKey` represented as a `c.blst_p1_affine` to compressed bytes in `out`. +pub export fn publicKeyToBytes(out: [*c]u8, pk: *const c.blst_p1_affine) void { + const pk_ptr: *const PublicKey = @ptrCast(pk); + out[0..PublicKey.COMPRESS_SIZE].* = pk_ptr.compress(); +} + +/// Validate a `c.blst_p1_affine` point as a valid `PublicKey`. +/// +/// Returns 0 on success, error code on failure. +pub export fn publicKeyValidate(a: *const c.blst_p1_affine) i32 { + const pk: *const PublicKey = @ptrCast(a); + pk.validate() catch |e| return toErrCode(e); + return 0; +} +/// Aggregate multiple `Signature`s (as `c.blst_p2_affine`s) and `PublicKey`s (as `c.blst_p1_affine`s) with randomness for security. +/// +/// Returns 0 on success, error code on failure. 
+pub export fn aggregateWithRandomness(
+    pk_out: *c.blst_p1_affine,
+    sig_out: *c.blst_p2_affine,
+    len: c_uint,
+    pks: [*c]*const c.blst_p1_affine,
+    sigs: [*c]*const c.blst_p2_affine,
+    pks_validate: bool,
+    sigs_groupcheck: bool,
+) i32 {
+    var rands: [32 * MAX_AGGREGATE_PER_JOB]u8 = [_]u8{0} ** (32 * MAX_AGGREGATE_PER_JOB);
+    var prng = std.Random.DefaultPrng.init(blk: {
+        var seed: u64 = undefined;
+        std.posix.getrandom(std.mem.asBytes(&seed)) catch unreachable;
+        break :blk seed;
+    });
+    const rand = prng.random();
+    std.Random.bytes(rand, &rands);
+
+    const agg_sig = AggregateSignature.aggregateWithRandomness(
+        sigs[0..len],
+        &rands,
+        sigs_groupcheck,
+        scratch_agg[0..],
+    ) catch |e| return toErrCode(e);
+    sig_out.* = agg_sig.toSignature().point;
+
+    const agg_pk = AggregatePublicKey.aggregateWithRandomness(
+        pks[0..len],
+        &rands,
+        pks_validate,
+        scratch_agg[0..],
+    ) catch |e| return toErrCode(e);
+    pk_out.* = agg_pk.toPublicKey().point;
+
+    return 0;
+}
+
+/// Aggregate multiple `PublicKey`s (as `c.blst_p1_affine`s) with randomness for security.
+///
+/// Returns 0 on success, error code on failure.
+pub export fn publicKeyAggregateWithRandomness(
+    out: *c.blst_p1_affine,
+    pks: [*c]*const c.blst_p1_affine,
+    len: c_uint,
+    pks_validate: bool,
+) i32 {
+    var rands: [32 * MAX_AGGREGATE_PER_JOB]u8 = [_]u8{0} ** (32 * MAX_AGGREGATE_PER_JOB);
+    var prng = std.Random.DefaultPrng.init(blk: {
+        var seed: u64 = undefined;
+        std.posix.getrandom(std.mem.asBytes(&seed)) catch unreachable;
+        break :blk seed;
+    });
+    const rand = prng.random();
+    std.Random.bytes(rand, &rands);
+
+    const agg_pk = AggregatePublicKey.aggregateWithRandomness(
+        pks[0..len],
+        &rands,
+        pks_validate,
+        scratch_agg[0..],
+    ) catch |e| return toErrCode(e);
+
+    out.* = agg_pk.toPublicKey().point;
+
+    return 0;
+}
+
+/// Aggregate multiple `PublicKey`s (as `c.blst_p1_affine`s).
+///
+/// Returns 0 on success, error code on failure. 
+pub export fn publicKeyAggregate(out: *c.blst_p1_affine, pks: [*c]const c.blst_p1_affine, len: c_uint, pks_validate: bool) i32 { + const agg_pk = AggregatePublicKey.aggregate(@ptrCast(pks[0..len]), pks_validate) catch |e| return toErrCode(e); + out.* = agg_pk.toPublicKey().point; + + return 0; +} + +////// Signature + +/// Deserialize a `Signature` as `c.blst_p2_affine` in `out` from compressed bytes. +/// +/// Returns 0 on success, error code on failure. +pub export fn signatureFromBytes(out: *c.blst_p2_affine, bytes: [*c]const u8, bytes_len: c_uint) i32 { + out.* = (Signature.uncompress(bytes[0..bytes_len]) catch |e| return toErrCode(e)).point; + return 0; +} + +/// Serialize a `Signature` as `c.blst_p2_affine` to compressed bytes in `out`. +pub export fn signatureToBytes(out: [*c]u8, sig: *const c.blst_p2_affine) void { + const sig_ptr: *const Signature = @ptrCast(sig); + out[0..Signature.COMPRESS_SIZE].* = sig_ptr.compress(); +} + +/// Validate a `c.blst_p2_affine` as a valid `Signature`. +/// +/// Returns 0 on success, error code on failure. +pub export fn signatureValidate(sig: *const c.blst_p2_affine, sig_infcheck: bool) i32 { + const sig_ptr: *const Signature = @ptrCast(sig); + sig_ptr.validate(sig_infcheck) catch |e| return toErrCode(e); + return 0; +} + +/// Verify a `Signature` (as `c.blst_p2_affine`) against a `PublicKey` (`c.blst_p1_affine`) and message `msg`. +/// +/// Returns 0 on success, error code on failure. +pub export fn signatureVerify( + sig: *const c.blst_p2_affine, + sig_groupcheck: bool, + msg: [*c]const u8, + msg_len: c_uint, + pk: *const c.blst_p1_affine, + pk_validate: bool, +) i32 { + const sig_ptr: *const Signature = @ptrCast(sig); + sig_ptr.verify( + sig_groupcheck, + msg[0..msg_len], + DST, + null, + @ptrCast(pk), + pk_validate, + ) catch |e| return toErrCode(e); + return 0; +} + +/// Verify an aggregate signature `c.blst_p2_affine` against multiple messages and `c.blst_p1_affine`s. 
+/// +/// Returns 0 if verification succeeds, 1 if verification fails, error code on error. +pub export fn signatureAggregateVerify( + sig: *const c.blst_p2_affine, + sig_groupcheck: bool, + msgs: [*c]const [32]u8, + pks: [*c]const c.blst_p1_affine, + len: c_uint, + pks_validate: bool, +) i32 { + const sig_ptr: *const Signature = @ptrCast(sig); + const res = sig_ptr.aggregateVerify( + sig_groupcheck, + &scratch_pairing, + msgs[0..len], + DST, + @ptrCast(pks[0..len]), + pks_validate, + ) catch |e| return toErrCode(e); + return @intFromBool(!res); +} + +/// Faster verify an aggregated signature `Signature` (as `c.blst_p2_affine`) against multiple messages and `PublicKey`s (as `c.blst_p1_affine`s). +/// +/// Returns 0 if verification succeeds, 1 if verification fails, error code on error. +pub export fn signatureFastAggregateVerify( + sig: *const c.blst_p2_affine, + sig_groupcheck: bool, + msg: *[32]u8, + pks: [*c]const c.blst_p1_affine, + pks_len: c_uint, +) i32 { + const sig_ptr: *const Signature = @ptrCast(sig); + const res = sig_ptr.fastAggregateVerify( + sig_groupcheck, + &scratch_pairing, + msg.*, + DST, + @ptrCast(pks[0..pks_len]), + ) catch |e| return toErrCode(e); + return @intFromBool(!res); +} + +/// Verify multiple aggregate signatures efficiently. +/// +/// Returns 0 if verification succeeds, 1 if verification fails, error code on error. 
+pub export fn signatureVerifyMultipleAggregateSignatures(
+    n_elems: c_uint,
+    msgs: [*c]const [32]u8,
+    pks: [*c]const *c.blst_p1_affine,
+    pks_validate: bool,
+    sigs: [*c]const *c.blst_p2_affine,
+    sig_groupcheck: bool,
+) i32 {
+    var rands: [MAX_AGGREGATE_PER_JOB][32]u8 = undefined;
+    var prng = std.Random.DefaultPrng.init(blk: {
+        var seed: u64 = undefined;
+        std.posix.getrandom(std.mem.asBytes(&seed)) catch unreachable;
+        break :blk seed;
+    });
+    const rand = prng.random();
+
+    for (0..MAX_AGGREGATE_PER_JOB) |i| {
+        std.Random.bytes(rand, &rands[i]);
+    }
+
+    const res = blst.verifyMultipleAggregateSignatures(
+        &scratch_pairing,
+        n_elems,
+        msgs[0..n_elems],
+        DST,
+        @ptrCast(pks[0..n_elems]),
+        pks_validate,
+        @ptrCast(sigs[0..n_elems]),
+        sig_groupcheck,
+        &rands,
+    ) catch |e| return toErrCode(e);
+
+    return @intFromBool(!res);
+}
+
+/// Aggregates a slice of `Signature`s (as `c.blst_p2_affine`) with randomness into a single `Signature` (represented as `c.blst_p2_affine`).
+///
+/// Returns 0 on success, error code on failure.
+pub export fn signatureAggregateWithRandomness(
+    out: *c.blst_p2_affine,
+    sigs: [*c]*const c.blst_p2_affine,
+    len: c_uint,
+    sigs_groupcheck: bool,
+) i32 {
+    var rands: [32 * MAX_AGGREGATE_PER_JOB]u8 = [_]u8{0} ** (32 * MAX_AGGREGATE_PER_JOB);
+    var prng = std.Random.DefaultPrng.init(blk: {
+        var seed: u64 = undefined;
+        std.posix.getrandom(std.mem.asBytes(&seed)) catch unreachable;
+        break :blk seed;
+    });
+    const rand = prng.random();
+    std.Random.bytes(rand, &rands);
+
+    const agg_sig = AggregateSignature.aggregateWithRandomness(
+        sigs[0..len],
+        &rands,
+        sigs_groupcheck,
+        scratch_agg[0..],
+    ) catch |e| return toErrCode(e);
+
+    out.* = agg_sig.toSignature().point;
+
+    return 0;
+}
+
+/// Aggregates a slice of `Signature`s (as `c.blst_p2_affine`) into a single `Signature` (represented as `c.blst_p2_affine`).
+///
+/// Returns 0 on success, error code on failure. 
+pub export fn signatureAggregate( + out: *c.blst_p2_affine, + sigs: [*c]const c.blst_p2_affine, + len: c_uint, + sigs_groupcheck: bool, +) i32 { + const agg_sig = AggregateSignature.aggregate( + @ptrCast(sigs[0..len]), + sigs_groupcheck, + ) catch |e| return toErrCode(e); + + out.* = agg_sig.toSignature().point; + + return 0; +} + +const std = @import("std"); +const blst = @import("blst"); +const Signature = blst.Signature; +const PublicKey = blst.PublicKey; +const SecretKey = blst.SecretKey; +const AggregateSignature = blst.AggregateSignature; +const AggregatePublicKey = blst.AggregatePublicKey; +const DST = blst.DST; +const MAX_AGGREGATE_PER_JOB = blst.MAX_AGGREGATE_PER_JOB; +const toErrCode = @import("common.zig").toErrCode; + +const c = blst.c; diff --git a/zig/root.zig b/zig/root.zig index 3704012..6fd5353 100644 --- a/zig/root.zig +++ b/zig/root.zig @@ -4,6 +4,7 @@ const persistent_merkle_tree = @import("persistent_merkle_tree.zig"); const bytes = @import("bytes.zig"); const lmdb = @import("lmdb.zig"); const leveldb = @import("leveldb.zig"); +const blst = @import("blst.zig"); comptime { std.testing.refAllDecls(hashtree); @@ -11,4 +12,5 @@ comptime { std.testing.refAllDecls(bytes); std.testing.refAllDecls(lmdb); std.testing.refAllDecls(leveldb); + std.testing.refAllDecls(blst); }