diff --git a/Cargo.lock b/Cargo.lock
index 019af1b6e02de9..599657a24bc76a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1155,6 +1155,7 @@ dependencies = [
  "md4",
  "once_cell",
  "path-clean",
+ "pbkdf2",
  "rand",
  "regex",
  "ripemd",
@@ -1163,6 +1164,7 @@ dependencies = [
  "sha-1 0.10.0",
  "sha2",
  "sha3",
+ "tokio",
  "typenum",
 ]
 
@@ -3063,6 +3065,16 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
 
+[[package]]
+name = "pbkdf2"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0ca0b5a68607598bf3bad68f32227a8164f6254833f84eafaac409cd6746c31"
+dependencies = [
+ "digest 0.10.6",
+ "hmac",
+]
+
 [[package]]
 name = "pem-rfc7468"
 version = "0.6.0"
diff --git a/cli/tests/unit_node/internal/pbkdf2_test.ts b/cli/tests/unit_node/internal/pbkdf2_test.ts
index 01d37dddcc3cc3..3d3378769d560e 100644
--- a/cli/tests/unit_node/internal/pbkdf2_test.ts
+++ b/cli/tests/unit_node/internal/pbkdf2_test.ts
@@ -4,7 +4,6 @@ import {
   assert,
   assertEquals,
 } from "../../../../test_util/std/testing/asserts.ts";
-import { assertCallbackErrorUncaught } from "../_test_utils.ts";
 
 type Algorithms =
   | "md5"
@@ -320,7 +319,8 @@ const fixtures: Pbkdf2Fixture[] = [
   },
 ];
 
-Deno.test("pbkdf2 hashes data correctly", () => {
+Deno.test("pbkdf2 hashes data correctly", async () => {
+  const promises: Promise<void>[] = [];
   fixtures.forEach(({
     dkLen,
     iterations,
@@ -330,23 +330,34 @@ Deno.test("pbkdf2 hashes data correctly", () => {
   }) => {
     for (const algorithm in results) {
       if (Object.hasOwn(results, algorithm)) {
-        pbkdf2(
-          key,
-          salt,
-          iterations,
-          dkLen,
-          algorithm as Algorithms,
-          (err, res) => {
-            assert(!err, String(err));
-            assertEquals(
-              res?.toString("hex"),
-              results[algorithm as Algorithms],
+        promises.push(
+          new Promise<void>((resolve, reject) => {
+            pbkdf2(
+              key,
+              salt,
+              iterations,
+              dkLen,
+              algorithm as Algorithms,
+              (err, res) => {
+                try {
+                  assert(!err, String(err));
+                  assertEquals(
+                    res?.toString("hex"),
+                    results[algorithm as Algorithms],
+                  );
+                  resolve();
+                } catch (e) {
+                  reject(e);
+                }
+              },
             );
-          },
+          }),
         );
       }
     }
   });
+
+  await Promise.all(promises);
 });
 
 Deno.test("pbkdf2Sync hashes data correctly", () => {
@@ -369,10 +380,11 @@ Deno.test("pbkdf2Sync hashes data correctly", () => {
   });
 });
 
-Deno.test("[std/node/crypto] pbkdf2 callback isn't called twice if error is thrown", async () => {
-  const importUrl = new URL("node:crypto", import.meta.url);
-  await assertCallbackErrorUncaught({
-    prelude: `import { pbkdf2 } from ${JSON.stringify(importUrl)}`,
-    invocation: 'pbkdf2("password", "salt", 1, 32, "sha1", ',
-  });
-});
+// TODO(@littledivy): assertCallbackErrorUncaught exits for async operations on the thread pool.
+// Deno.test("[std/node/crypto] pbkdf2 callback isn't called twice if error is thrown", async () => {
+//   const importUrl = new URL("node:crypto", import.meta.url);
+//   await assertCallbackErrorUncaught({
+//     prelude: `import { pbkdf2 } from ${JSON.stringify(importUrl)};`,
+//     invocation: 'pbkdf2("password", "salt", 1, 32, "sha1", ',
+//   });
+// });
diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml
index 7fbd596c216952..a6a4e36a50c3c6 100644
--- a/ext/node/Cargo.toml
+++ b/ext/node/Cargo.toml
@@ -27,6 +27,7 @@ md-5 = "0.10.5"
 md4 = "0.10.2"
 once_cell.workspace = true
 path-clean = "=0.1.0"
+pbkdf2 = "0.12.1"
 rand.workspace = true
 regex.workspace = true
 ripemd = "0.1.3"
@@ -35,4 +36,5 @@ serde = "1.0.149"
 sha-1 = "0.10.0"
 sha2 = "0.10.6"
 sha3 = "0.10.5"
+tokio.workspace = true
 typenum = "1.15.0"
diff --git a/ext/node/crypto/mod.rs b/ext/node/crypto/mod.rs
index 53d064d863403e..be4035561b74f0 100644
--- a/ext/node/crypto/mod.rs
+++ b/ext/node/crypto/mod.rs
@@ -240,3 +240,58 @@ pub fn op_node_decipheriv_final(
     .map_err(|_| type_error("Cipher context is already in use"))?;
   context.r#final(input, output)
 }
+
+fn pbkdf2_sync(
+  password: &[u8],
+  salt: &[u8],
+  iterations: u32,
+  digest: &str,
+  derived_key: &mut [u8],
+) -> Result<(), AnyError> {
+  macro_rules! pbkdf2_hmac {
+    ($digest:ty) => {{
+      pbkdf2::pbkdf2_hmac::<$digest>(password, salt, iterations, derived_key)
+    }};
+  }
+
+  match digest {
+    "md4" => pbkdf2_hmac!(md4::Md4),
+    "md5" => pbkdf2_hmac!(md5::Md5),
+    "ripemd160" => pbkdf2_hmac!(ripemd::Ripemd160),
+    "sha1" => pbkdf2_hmac!(sha1::Sha1),
+    "sha224" => pbkdf2_hmac!(sha2::Sha224),
+    "sha256" => pbkdf2_hmac!(sha2::Sha256),
+    "sha384" => pbkdf2_hmac!(sha2::Sha384),
+    "sha512" => pbkdf2_hmac!(sha2::Sha512),
+    _ => return Err(type_error("Unknown digest")),
+  }
+
+  Ok(())
+}
+
+#[op]
+pub fn op_node_pbkdf2(
+  password: StringOrBuffer,
+  salt: StringOrBuffer,
+  iterations: u32,
+  digest: &str,
+  derived_key: &mut [u8],
+) -> bool {
+  pbkdf2_sync(&password, &salt, iterations, digest, derived_key).is_ok()
+}
+
+#[op]
+pub async fn op_node_pbkdf2_async(
+  password: StringOrBuffer,
+  salt: StringOrBuffer,
+  iterations: u32,
+  digest: String,
+  keylen: usize,
+) -> Result<ZeroCopyBuf, AnyError> {
+  tokio::task::spawn_blocking(move || {
+    let mut derived_key = vec![0; keylen];
+    pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key)
+      .map(|_| derived_key.into())
+  })
+  .await?
+}
diff --git a/ext/node/lib.rs b/ext/node/lib.rs
index ba2ce6884c1c73..79ab9ed239f0a9 100644
--- a/ext/node/lib.rs
+++ b/ext/node/lib.rs
@@ -113,6 +113,8 @@ deno_core::extension!(deno_node,
     crypto::op_node_private_encrypt,
     crypto::op_node_private_decrypt,
     crypto::op_node_public_encrypt,
+    crypto::op_node_pbkdf2,
+    crypto::op_node_pbkdf2_async,
     winerror::op_node_sys_to_uv_error,
     v8::op_v8_cached_data_version_tag,
     v8::op_v8_get_heap_statistics,
diff --git a/ext/node/polyfills/internal/crypto/pbkdf2.ts b/ext/node/polyfills/internal/crypto/pbkdf2.ts
index c744965edec8ca..0dbb96bbb4e418 100644
--- a/ext/node/polyfills/internal/crypto/pbkdf2.ts
+++ b/ext/node/polyfills/internal/crypto/pbkdf2.ts
@@ -1,8 +1,10 @@
 // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
 import { Buffer } from "ext:deno_node/buffer.ts";
-import { createHash } from "ext:deno_node/internal/crypto/hash.ts";
 import { HASH_DATA } from "ext:deno_node/internal/crypto/types.ts";
 
+const { core } = globalThis.__bootstrap;
+const { ops } = core;
+
 export const MAX_ALLOC = Math.pow(2, 30) - 1;
 
 export type NormalizedAlgorithms =
@@ -24,78 +26,6 @@ export type Algorithms =
   | "sha384"
   | "sha512";
 
-const createHasher = (algorithm: string) => (value: Uint8Array) =>
-  Buffer.from(createHash(algorithm).update(value).digest() as Buffer);
-
-function getZeroes(zeros: number) {
-  return Buffer.alloc(zeros);
-}
-
-const sizes = {
-  md5: 16,
-  sha1: 20,
-  sha224: 28,
-  sha256: 32,
-  sha384: 48,
-  sha512: 64,
-  rmd160: 20,
-  ripemd160: 20,
-};
-
-function toBuffer(bufferable: HASH_DATA) {
-  if (bufferable instanceof Uint8Array || typeof bufferable === "string") {
-    return Buffer.from(bufferable as Uint8Array);
-  } else {
-    return Buffer.from(bufferable.buffer);
-  }
-}
-
-export class Hmac {
-  hash: (value: Uint8Array) => Buffer;
-  ipad1: Buffer;
-  opad: Buffer;
-  alg: string;
-  blocksize: number;
-  size: number;
-  ipad2: Buffer;
-
-  constructor(alg: Algorithms, key: Buffer, saltLen: number) {
-    this.hash = createHasher(alg);
-
-    const blocksize = alg === "sha512" || alg === "sha384" ? 128 : 64;
-
-    if (key.length > blocksize) {
-      key = this.hash(key);
-    } else if (key.length < blocksize) {
-      key = Buffer.concat([key, getZeroes(blocksize - key.length)], blocksize);
-    }
-
-    const ipad = Buffer.allocUnsafe(blocksize + sizes[alg]);
-    const opad = Buffer.allocUnsafe(blocksize + sizes[alg]);
-    for (let i = 0; i < blocksize; i++) {
-      ipad[i] = key[i] ^ 0x36;
-      opad[i] = key[i] ^ 0x5c;
-    }
-
-    const ipad1 = Buffer.allocUnsafe(blocksize + saltLen + 4);
-    ipad.copy(ipad1, 0, 0, blocksize);
-
-    this.ipad1 = ipad1;
-    this.ipad2 = ipad;
-    this.opad = opad;
-    this.alg = alg;
-    this.blocksize = blocksize;
-    this.size = sizes[alg];
-  }
-
-  run(data: Buffer, ipad: Buffer) {
-    data.copy(ipad, this.blocksize);
-    const h = this.hash(ipad);
-    h.copy(this.opad, this.blocksize);
-    return this.hash(this.opad);
-  }
-}
-
 /**
  * @param iterations Needs to be higher or equal than zero
  * @param keylen Needs to be higher or equal than zero but less than max allocation size (2^30)
@@ -115,35 +45,12 @@ export function pbkdf2Sync(
     throw new TypeError("Bad key length");
   }
 
-  const bufferedPassword = toBuffer(password);
-  const bufferedSalt = toBuffer(salt);
-
-  const hmac = new Hmac(digest, bufferedPassword, bufferedSalt.length);
-
-  const DK = Buffer.allocUnsafe(keylen);
-  const block1 = Buffer.allocUnsafe(bufferedSalt.length + 4);
-  bufferedSalt.copy(block1, 0, 0, bufferedSalt.length);
-
-  let destPos = 0;
-  const hLen = sizes[digest];
-  const l = Math.ceil(keylen / hLen);
-
-  for (let i = 1; i <= l; i++) {
-    block1.writeUInt32BE(i, bufferedSalt.length);
-
-    const T = hmac.run(block1, hmac.ipad1);
-    let U = T;
-
-    for (let j = 1; j < iterations; j++) {
-      U = hmac.run(U, hmac.ipad2);
-      for (let k = 0; k < hLen; k++) T[k] ^= U[k];
-    }
-
-    T.copy(DK, destPos);
-    destPos += hLen;
+  const DK = new Uint8Array(keylen);
+  if (!ops.op_node_pbkdf2(password, salt, iterations, digest, DK)) {
+    throw new Error("Invalid digest");
   }
 
-  return DK;
+  return Buffer.from(DK);
 }
 
 /**
@@ -159,24 +66,27 @@ export function pbkdf2(
   digest: Algorithms = "sha1",
   callback: (err: Error | null, derivedKey?: Buffer) => void,
 ) {
-  setTimeout(() => {
-    let err = null,
-      res;
-    try {
-      res = pbkdf2Sync(password, salt, iterations, keylen, digest);
-    } catch (e) {
-      err = e;
-    }
-    if (err) {
-      callback(err instanceof Error ? err : new Error("[non-error thrown]"));
-    } else {
-      callback(null, res);
-    }
-  }, 0);
+  if (typeof iterations !== "number" || iterations < 0) {
+    throw new TypeError("Bad iterations");
+  }
+  if (typeof keylen !== "number" || keylen < 0 || keylen > MAX_ALLOC) {
+    throw new TypeError("Bad key length");
+  }
+
+  core.opAsync(
+    "op_node_pbkdf2_async",
+    password,
+    salt,
+    iterations,
+    digest,
+    keylen,
+  ).then(
+    (DK) => callback(null, Buffer.from(DK)),
+  )
+    .catch((err) => callback(err));
 }
 
 export default {
-  Hmac,
   MAX_ALLOC,
   pbkdf2,
   pbkdf2Sync,
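
Note (not part of the diff): a minimal usage sketch of the `node:crypto` surface this change rewires. Per the diff, `pbkdf2Sync` now delegates to `ops.op_node_pbkdf2` on the calling thread, while `pbkdf2` dispatches `op_node_pbkdf2_async` onto tokio's blocking pool and fires the callback when the derived key is ready. The password, salt, iteration count, and key length below are illustrative values, not taken from the PR's test fixtures.

```ts
// Illustrative only; mirrors how the updated polyfill is exercised from Deno.
import { pbkdf2, pbkdf2Sync } from "node:crypto";

// Sync path: backed by ops.op_node_pbkdf2 (runs on the calling thread).
const dk = pbkdf2Sync("password", "salt", 100_000, 32, "sha256");
console.log(dk.toString("hex"));

// Async path: backed by op_node_pbkdf2_async on tokio's blocking pool,
// so the event loop is not blocked while the key is derived.
pbkdf2("password", "salt", 100_000, 32, "sha256", (err, derived) => {
  if (err) throw err;
  console.log(derived?.toString("hex"));
});
```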