Merge pull request #600 from snyk/fix/hash-large-files
fix: compute sha1 on .jar files of size up to buffer.constants.MAX_LENGTH
adrobuta committed Aug 5, 2024
2 parents e840fcc + 6c4d923 commit 73bc4cb
Showing 3 changed files with 15 additions and 13 deletions.
1 change: 1 addition & 0 deletions lib/analyzer/applications/java.ts
@@ -115,6 +115,7 @@ function unpackJar({
   const nestedJars: JarBuffer[] = [];
   let coords: JarCoords | null = null;
 
+  // TODO: consider switching to node-stream-zip that supports streaming
   let zip: admzip;
   let zipEntries: admzip.IZipEntry[];
 
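The new TODO points at node-stream-zip, which can read archive entries as streams instead of buffering the whole file in memory. A rough sketch of what that might look like (hypothetical, not part of this commit; the helper name and error handling are assumptions):

import * as crypto from "crypto";
import StreamZip from "node-stream-zip";

// Hypothetical sketch: hash one entry of a .jar without reading the
// whole archive into memory.
async function hashJarEntry(jarPath: string, entryName: string): Promise<string> {
  const zip = new StreamZip.async({ file: jarPath });
  try {
    const entryStream = await zip.stream(entryName);
    const hash = crypto.createHash("sha1");
    for await (const chunk of entryStream) {
      hash.update(chunk as Buffer);
    }
    return hash.digest("hex");
  } finally {
    await zip.close();
  }
}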
25 changes: 13 additions & 12 deletions lib/buffer-utils.ts
@@ -1,23 +1,24 @@
 import * as crypto from "crypto";
-import { Readable } from "stream";
 import { HashAlgorithm } from "./types";
 
 const HASH_ENCODING = "hex";
 
 export async function bufferToSha1(buffer: Buffer): Promise<string> {
-  const stream = Readable.from(buffer);
   const hash = crypto.createHash(HashAlgorithm.Sha1);
+  const chunkSize = 100 * 1024 * 1024; // 100 MB
 
   return new Promise((resolve, reject) => {
-    stream
-      .pipe(hash)
-      .on("finish", () => {
-        hash.end();
-        const digest = hash.read().toString(HASH_ENCODING);
-        resolve(digest);
-      })
-      .on("error", (err) => {
-        reject(err);
-      });
+    try {
+      for (let offset = 0; offset < buffer.length; offset += chunkSize) {
+        const end = Math.min(offset + chunkSize, buffer.length);
+        const chunk = buffer.slice(offset, end);
+        hash.update(chunk);
+      }
+
+      const digest = hash.digest(HASH_ENCODING);
+      resolve(digest);
+    } catch (err) {
+      reject(err);
+    }
   });
 }
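The rewrite replaces the Readable.from(...).pipe(hash) stream with direct hash.update calls over 100 MB slices, which is what lets inputs grow up to buffer.constants.MAX_LENGTH. A minimal standalone sketch of the same idea (assuming only Node's built-in crypto; the function name is illustrative):

import * as crypto from "crypto";

// Sketch of the chunked approach above: feed the hash fixed-size
// views of the buffer so no single update exceeds 100 MB.
function sha1Hex(buffer: Buffer, chunkSize = 100 * 1024 * 1024): string {
  const hash = crypto.createHash("sha1");
  for (let offset = 0; offset < buffer.length; offset += chunkSize) {
    // subarray returns a view, not a copy
    hash.update(buffer.subarray(offset, Math.min(offset + chunkSize, buffer.length)));
  }
  return hash.digest("hex");
}

// sha1Hex(Buffer.from("hello world")) === "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"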
2 changes: 1 addition & 1 deletion test/lib/buffer-utils.spec.ts
@@ -23,7 +23,7 @@ describe("buffer-utils", () => {
     expect(result).toEqual(hashedText);
   });
 
-  xit("should handle large files", async () => {
+  it("should handle large files", async () => {
     const megabyte = 1024 * 1024;
     const gigabyte = megabyte * 1024;
 
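The change re-enables the previously skipped large-file test (xit → it). The rest of the test body is truncated here; a sketch of how such a test might exercise the new code path (buffer size and assertion are assumptions, not the actual test):

it("should handle large files", async () => {
  const megabyte = 1024 * 1024;
  const gigabyte = megabyte * 1024;

  // Assumption: allocate a buffer beyond what the old streaming
  // implementation could hash, then verify a well-formed digest.
  const largeBuffer = Buffer.alloc(2 * gigabyte, 0x61);
  const digest = await bufferToSha1(largeBuffer);

  expect(digest).toHaveLength(40); // hex-encoded SHA-1
});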
