Compress the encoded wasm payload in the dataurl for jupyter notebooks
jleibs committed Dec 11, 2024
1 parent 07b8825 commit 682ba20
Showing 1 changed file with 41 additions and 7 deletions.
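
Context for the change (editor's note, not part of the commit): the wasm bytes are embedded as a base64 data URL inside the generated bundle, which, per the commit title, is what ends up inlined in Jupyter notebook outputs. Base64 inflates the payload by roughly a third, so gzipping the wasm before encoding typically shrinks the embedded blob considerably. A hypothetical size check, assuming re_viewer_bg.wasm sits next to the script:

import * as fs from "node:fs";
import * as zlib from "node:zlib";

// Compare the base64-encoded size of the raw wasm vs. the gzipped wasm.
const wasm = fs.readFileSync("re_viewer_bg.wasm");
const raw_b64 = wasm.toString("base64").length;
const gzip_b64 = zlib.gzipSync(wasm).toString("base64").length;
console.log(`raw base64: ${raw_b64} chars, gzipped base64: ${gzip_b64} chars`);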
48 changes: 41 additions & 7 deletions rerun_js/web-viewer/bundle.mjs
@@ -6,25 +6,59 @@
import { fileURLToPath } from "node:url";
import * as path from "node:path";
import * as fs from "node:fs";
+import * as zlib from "node:zlib";
+import * as util from "node:util";

const __filename = path.resolve(fileURLToPath(import.meta.url));
const __dirname = path.dirname(__filename);

-const wasm = fs.readFileSync(path.join(__dirname, "re_viewer_bg.wasm"));
+const wasm = zlib.gzipSync(fs.readFileSync(path.join(__dirname, "re_viewer_bg.wasm")));
const js = fs.readFileSync(path.join(__dirname, "re_viewer.js"), "utf-8");
const index = fs.readFileSync(path.join(__dirname, "index.js"), "utf-8");

const INLINE_MARKER = "/*<INLINE-MARKER>*/";

/** @param {Buffer} buffer */
function buffer_to_data_url(buffer) {
-  return `data:application/wasm;base64,${buffer.toString("base64")}`;
+  return `data:application/octet-stream;gzip;base64,${buffer.toString("base64")}`;
}

-async function data_url_to_buffer(dataUrl) {
-  const response = await fetch(dataUrl);
-  return response.arrayBuffer();
+async function compressed_data_url_to_buffer(dataUrl) {
+  // Fetch the data from the data URL
+  const response = await fetch(dataUrl);
+  const blob = await response.blob();
+
+  // Convert the blob to an ArrayBuffer
+  const arrayBuffer = await blob.arrayBuffer();
+  const compressedBuffer = new Uint8Array(arrayBuffer);
+
+  // Decompress using DecompressionStream
+  const stream = new ReadableStream({
+    start(controller) {
+      controller.enqueue(compressedBuffer);
+      controller.close();
+    },
+  });
+  const decompressedStream = stream.pipeThrough(new DecompressionStream("gzip"));
+  const reader = decompressedStream.getReader();
+  const chunks = [];
+  let result;
+
+  while (!(result = await reader.read()).done) {
+    chunks.push(result.value);
+  }
+
+  // Combine chunks into a single Uint8Array
+  const decompressedBuffer = new Uint8Array(
+    chunks.reduce((total, chunk) => total + chunk.length, 0)
+  );
+  let offset = 0;
+  for (const chunk of chunks) {
+    decompressedBuffer.set(chunk, offset);
+    offset += chunk.length;
+  }
+
+  return decompressedBuffer;
}

const inlined_js = js.replace("export default function", "return function");
@@ -35,9 +69,9 @@ async function fetch_viewer_js() {
}
async function fetch_viewer_wasm() {
-${data_url_to_buffer.toString()}
+${compressed_data_url_to_buffer.toString()}
const dataUrl = ${JSON.stringify(buffer_to_data_url(wasm))};
-const buffer = await data_url_to_buffer(dataUrl);
+const buffer = await compressed_data_url_to_buffer(dataUrl);
return new Response(buffer, { "headers": { "Content-Type": "application/wasm" } });
}
`;
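
For reference (editor's note, not part of the commit): the same gzip decompression can be written more compactly by letting Response collect the stream, assuming a runtime that provides Blob, Response, DecompressionStream, and data: URL support in fetch (modern browsers, Node 18+). A sketch with a hypothetical decompress_data_url helper:

// Sketch only: equivalent to compressed_data_url_to_buffer above, but using
// Response to gather the decompressed stream instead of a manual reader loop.
async function decompress_data_url(dataUrl) {
  const compressed = await (await fetch(dataUrl)).arrayBuffer();
  const stream = new Blob([compressed])
    .stream()
    .pipeThrough(new DecompressionStream("gzip"));
  return new Uint8Array(await new Response(stream).arrayBuffer());
}

Either form should yield the same bytes; the commit's explicit reader loop simply avoids constructing an extra Response inside the inlined helper.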
