diff --git a/lib/analyzer/applications/java.ts b/lib/analyzer/applications/java.ts
index aa10d400..8ec4820c 100644
--- a/lib/analyzer/applications/java.ts
+++ b/lib/analyzer/applications/java.ts
@@ -115,6 +115,7 @@ function unpackJar({
   const nestedJars: JarBuffer[] = [];
   let coords: JarCoords | null = null;
 
+  // TODO: consider switching to node-stream-zip that supports streaming
   let zip: admzip;
   let zipEntries: admzip.IZipEntry[];
 
diff --git a/lib/buffer-utils.ts b/lib/buffer-utils.ts
index 449312d6..4795cd6f 100644
--- a/lib/buffer-utils.ts
+++ b/lib/buffer-utils.ts
@@ -1,23 +1,24 @@
 import * as crypto from "crypto";
-import { Readable } from "stream";
 
 import { HashAlgorithm } from "./types";
 
 const HASH_ENCODING = "hex";
 
 export async function bufferToSha1(buffer: Buffer): Promise<string> {
-  const stream = Readable.from(buffer);
   const hash = crypto.createHash(HashAlgorithm.Sha1);
+  const chunkSize = 100 * 1024 * 1024; // 100 MB
 
   return new Promise((resolve, reject) => {
-    stream
-      .pipe(hash)
-      .on("finish", () => {
-        hash.end();
-        const digest = hash.read().toString(HASH_ENCODING);
-        resolve(digest);
-      })
-      .on("error", (err) => {
-        reject(err);
-      });
+    try {
+      for (let offset = 0; offset < buffer.length; offset += chunkSize) {
+        const end = Math.min(offset + chunkSize, buffer.length);
+        const chunk = buffer.slice(offset, end);
+        hash.update(chunk);
+      }
+
+      const digest = hash.digest(HASH_ENCODING);
+      resolve(digest);
+    } catch (err) {
+      reject(err);
+    }
   });
 }
diff --git a/test/lib/buffer-utils.spec.ts b/test/lib/buffer-utils.spec.ts
index efa87651..e3740b0c 100644
--- a/test/lib/buffer-utils.spec.ts
+++ b/test/lib/buffer-utils.spec.ts
@@ -23,7 +23,7 @@ describe("buffer-utils", () => {
     expect(result).toEqual(hashedText);
   });
 
-  xit("should handle large files", async () => {
+  it("should handle large files", async () => {
     const megabyte = 1024 * 1024;
     const gigabyte = megabyte * 1024;
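For context, the `lib/buffer-utils.ts` hunk swaps a stream pipeline (`Readable.from(buffer).pipe(hash)`) for direct incremental `hash.update()` calls over fixed-size slices, so a multi-gigabyte buffer never has to pass through a stream in one go. A minimal standalone sketch of that chunked-update pattern follows; the helper name `sha1InChunks` and its default chunk size are illustrative assumptions, not part of the patch:

```ts
import * as crypto from "crypto";

// Feed a large Buffer to a hash in fixed-size slices instead of piping it
// through a stream. Each update() call sees at most `chunkSize` bytes, so
// the buffer is hashed incrementally rather than in a single pass.
function sha1InChunks(
  buffer: Buffer,
  chunkSize: number = 100 * 1024 * 1024, // 100 MB, mirroring the patch
): string {
  const hash = crypto.createHash("sha1");
  for (let offset = 0; offset < buffer.length; offset += chunkSize) {
    const end = Math.min(offset + chunkSize, buffer.length);
    hash.update(buffer.subarray(offset, end));
  }
  return hash.digest("hex");
}

// Usage sketch: hashes the whole buffer in one update() call here,
// since the input is far smaller than the chunk size.
console.log(sha1InChunks(Buffer.from("hello")));
```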