<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>File Compressor</title>
<script src="https://cdn.jsdelivr.net/pako/1.0.5/pako.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/web-streams-polyfill@3.0.0/dist/ponyfill.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/streamsaver@2.0.5/StreamSaver.min.js"></script>
</head>
<body>
<div>
File to compress using zlib:
<br/>
<input type="file" id="file-upload" onchange="compressFileAsync(event)"/>
<br/>
</div>
<br/>
<br/>
<div>
Progress:
<div id="progressBar"></div>
</div>
<script type="text/Javascript">
'use strict';
// Tracks whether a compression run is in flight; used by the beforeunload guard
var compressionDone = true;
function readFilePromise(file, start, end) {
return new Promise(function(resolve, reject) {
var reader = new FileReader();
// .slice() returns a Blob covering just the requested byte range of the file
const fileSliceBlob = file.slice(start, end);
reader.readAsArrayBuffer(fileSliceBlob);
reader.onload = function() {
resolve(reader.result);
}
reader.onerror = function() {
reject(reader.error);
}
});
}
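// Example usage (hypothetical values): read only the first 16 bytes of a file,
// e.g. to peek at its magic number:
//   const header = await readFilePromise(file, 0, 16);
//   console.log(new Uint8Array(header));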
async function compressFileAsync(event) {
var writer = undefined
try {
compressionDone = false
const file = document.getElementById("file-upload").files[0];
// Use best compression and emit gzip (rather than raw zlib) headers
var deflator = new pako.Deflate({ level: 9, gzip: true });
console.log("Compressing at level 9 with gzip headers.");
// Read the file 1 MiB at a time
const sliceSize = 1024 * 1024;
const numberOfSlices = Math.max(1, Math.ceil(file.size / sliceSize));
console.log(
"Processing file: " + file.name + ", size: " + file.size + " bytes, slice size: " +
sliceSize + ", number of slices: " + numberOfSlices
);
// Create a writable stream for the output
// The compressed size isn't known in advance, so the original size is passed
// only as an estimate for the download progress indicator
const fileName = file.name + '.gz';
writer = createStream(fileName, file.size);
let startTime = performance.now(); // start time
for (let i = 0; i < numberOfSlices; ++i) {
const startOffset = i * sliceSize;
// file.slice() clamps past the end of the file, so overshoot on the last slice is fine
const endOffset = startOffset + sliceSize;
// Update the user with some progress
// Report roughly every 10 MiB: with 1 MiB slices, that is every 10th slice
if ((i % 10) == 0) {
progressBar.innerHTML = Math.round((i + 1) / numberOfSlices * 100) + "%";
}
// readFilePromise is awaited, so slices are read and compressed in order
const data = await readFilePromise(file, startOffset, endOffset);
// Compress the slice. With pako 1.x, Z_SYNC_FLUSH flushes the compressed
// bytes so deflator.result holds just this slice's output; the last slice
// is pushed with `true` (Z_FINISH) so the gzip trailer gets written
const isLastSlice = (i === numberOfSlices - 1);
deflator.push(data, isLastSlice ? true : pako.Z_SYNC_FLUSH);
if (deflator.err) {
throw new Error(deflator.msg);
}
// Await the write so the stream's backpressure is respected
await writer.write(deflator.result);
}
progressBar.innerHTML = "100%";
let endTime = performance.now(); // end time
let executionTimeInSeconds = (endTime - startTime) / 1000;
console.log('Compression time: ' + executionTimeInSeconds + ' seconds');
} catch(error) {
console.log(error);
} finally {
if (typeof writer != "undefined") {
writer.close()
}
compressionDone = true
}
}
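// A minimal alternative sketch, assuming a browser that ships the native
// CompressionStream API (Chromium-based browsers and recent Firefox/Safari):
// the File's ReadableStream is piped through a gzip CompressionStream straight
// into the StreamSaver WritableStream, with no manual slicing and no pako.
// compressFileNative is a hypothetical helper, not wired to the page above.
async function compressFileNative(file) {
const fileStream = streamSaver.createWriteStream(file.name + '.gz');
await file.stream()
.pipeThrough(new CompressionStream('gzip'))
.pipeTo(fileStream);
}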
function createStream(fileName, fileSize) {
console.log("Creating a stream for file " + fileName + " with total size: " + fileSize)
// streamSaver.createWriteStream() returns a writable byte stream
// The WritableStream only accepts Uint8Array chunks
// (no other typed arrays, ArrayBuffers or strings are allowed)
const fileStream = streamSaver.createWriteStream(fileName, {
size: fileSize, // (optional filesize) Will show progress
writableStrategy: undefined, // (optional)
readableStrategy: undefined // (optional)
})
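// Under the hood, StreamSaver routes written chunks through a service worker
// that emulates a file-download response, so the bytes stream to disk and
// memory use stays flat regardless of file size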
const writer = fileStream.getWriter()
// Abort on page unload so the download does not look stuck
window.onunload = () => {
// getWriter() locks the stream, so abort through the writer rather than
// on fileStream itself
writer.abort();
}
window.onbeforeunload = evt => {
if (!compressionDone) {
// Setting returnValue triggers the browser's own leave-page prompt;
// alert() is not reliable inside beforeunload handlers
evt.returnValue = `Are you sure you want to leave?`;
}
}
return writer
}
</script>
</body>
</html>