Skip to content

Commit

Permalink
feat: add getChunk parameters (#384)
Browse files Browse the repository at this point in the history
* feat: add `getChunk` parameters

* refactor(app): chunk checksum
  • Loading branch information
kukhariev authored Jun 4, 2022
1 parent e222335 commit e1b952f
Show file tree
Hide file tree
Showing 4 changed files with 82 additions and 46 deletions.
67 changes: 52 additions & 15 deletions src/app/digest.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,34 @@
import { Canceler, RequestConfig, Uploader } from 'ngx-uploadx';

/**
 * Reads a Blob into memory as an ArrayBuffer.
 * @param body - blob to read
 * @param canceler - optional cancellation hook; on cancel the read is
 *   aborted and the promise rejects with `'aborted'`
 * @returns promise resolving to the blob's raw bytes
 */
export function readBlob(body: Blob, canceler?: Canceler): Promise<ArrayBuffer> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    if (canceler) {
      canceler.onCancel = () => {
        reader.abort();
        // The original rejected with `'aborted' && reader.abort()`, i.e. the
        // return value of abort() (undefined) — losing the rejection reason.
        reject('aborted');
      };
    }
    reader.onload = () => resolve(reader.result as ArrayBuffer);
    reader.onerror = reject;
    reader.readAsArrayBuffer(body);
  });
}

/** Converts a digest buffer to its lowercase hexadecimal representation. */
export function bufferToHex(buf: ArrayBuffer) {
  let hex = '';
  for (const byte of new Uint8Array(buf)) {
    hex += byte.toString(16).padStart(2, '0');
  }
  return hex;
}

/**
 * Encodes a digest buffer as a base64 string.
 *
 * Accumulates the binary string byte-by-byte instead of spreading the whole
 * Uint8Array into `String.fromCharCode(...)` — the spread form can exceed the
 * engine's maximum argument count / call stack for large buffers.
 * @param hash - raw digest bytes
 */
export function bufferToBase64(hash: ArrayBuffer) {
  let binary = '';
  for (const byte of new Uint8Array(hash)) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
}

export const hasher = {
lookup: {} as Record<string, { key: string; sha: string }>,
isSupported: window.crypto && !!window.crypto.subtle,
async sha(data: ArrayBuffer): Promise<string> {
const dig = await crypto.subtle.digest('SHA-1', data);
return String.fromCharCode(...new Uint8Array(dig));
async sha(data: ArrayBuffer): Promise<ArrayBuffer> {
return crypto.subtle.digest('SHA-1', data);
},
digestHex(body: Blob, canceler?: Canceler): Promise<string> {
return readBlob(body, canceler).then(buffer => this.sha(buffer).then(bufferToHex));
},
getDigest(body: Blob, canceler?: Canceler): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
canceler && (canceler.onCancel = () => reject('aborted' && reader.abort()));
reader.onload = async () => resolve(await this.sha(reader.result as ArrayBuffer));
reader.onerror = reject;
reader.readAsArrayBuffer(body);
});
digestBase64(body: Blob, canceler?: Canceler): Promise<string> {
return readBlob(body, canceler).then(buffer => this.sha(buffer).then(bufferToBase64));
}
};

Expand All @@ -22,8 +37,19 @@ export async function injectTusChecksumHeader(
req: RequestConfig
): Promise<RequestConfig> {
if (hasher.isSupported && req.body instanceof Blob) {
const sha = await hasher.getDigest(req.body, req.canceler);
Object.assign(req.headers, { 'Upload-Checksum': `sha1 ${btoa(sha)}` });
if (this.chunkSize) {
const { body, start } = this.getChunk((this.offset || 0) + this.chunkSize);
hasher.digestBase64(body, req.canceler).then(digest => {
const key = `${body.size}-${start}`;
hasher.lookup[req.url] = { key, sha: digest };
});
}
const key = `${req.body.size}-${this.offset}`;
const sha =
hasher.lookup[req.url]?.key === key
? hasher.lookup[req.url].sha
: await hasher.digestBase64(req.body, req.canceler);
Object.assign(req.headers, { 'Upload-Checksum': `sha1 ${sha}` });
}
return req;
}
Expand All @@ -33,8 +59,19 @@ export async function injectDigestHeader(
req: RequestConfig
): Promise<RequestConfig> {
if (hasher.isSupported && req.body instanceof Blob) {
const sha = await hasher.getDigest(req.body, req.canceler);
Object.assign(req.headers, { Digest: `sha=${btoa(sha)}` });
if (this.chunkSize) {
const { body, start } = this.getChunk((this.offset || 0) + this.chunkSize);
hasher.digestBase64(body, req.canceler).then(digest => {
const key = `${body.size}-${start}`;
hasher.lookup[req.url] = { key, sha: digest };
});
}
const key = `${req.body.size}-${this.offset}`;
const sha =
hasher.lookup[req.url]?.key === key
? hasher.lookup[req.url].sha
: await hasher.digestBase64(req.body, req.canceler);
Object.assign(req.headers, { Digest: `sha=${sha}` });
}
return req;
}
2 changes: 1 addition & 1 deletion src/app/on-push/on-push.component.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ export class OnPushComponent implements OnDestroy {
uploads$: Observable<Uploader[]>;
options: UploadxOptions = {
endpoint: `${environment.api}/files?uploadType=uploadx`,
chunkSize: 1024 * 1024 * 8,
chunkSize: 1024 * 1024 * 64,
prerequest: injectDigestHeader,
authorize: (req, token) => {
token && (req.headers['Authorization'] = `Token ${token}`);
Expand Down
46 changes: 20 additions & 26 deletions src/app/service-way/service-way.component.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,10 @@ export class CustomId implements IdService {
return new Date().getTime().toString(36);
}
const blob = uploader.file.slice(0, 256);
return await hasher.getDigest(blob);
return hasher.digestHex(blob);
}
}

// eslint-disable-next-line max-classes-per-file
@Component({
selector: 'app-service-way',
templateUrl: './service-way.component.html',
Expand All @@ -27,34 +26,29 @@ export class ServiceWayComponent implements OnDestroy, OnInit {
state$!: Observable<UploadState>;
uploads: UploadState[] = [];
private unsubscribe$ = new Subject<void>();
options!: UploadxOptions;
options: UploadxOptions = {
endpoint: `${environment.api}/files?uploadType=tus`,
uploaderClass: Tus,
token: this.authService.getAccessToken
// prerequest: injectTusChecksumHeader
};

constructor(private uploadxService: UploadxService, private authService: AuthService) {}

ngOnInit(): void {
const endpoint = `${environment.api}/files?uploadType=tus`;
this.uploadxService.request({ method: 'OPTIONS', url: endpoint }).then(
({ headers }) => {
console.table(headers);
const checkSumSupported =
hasher.isSupported && ('Tus-Checksum-Algorithm' in headers || true); // debug
this.options = {
endpoint,
uploaderClass: Tus,
token: this.authService.getAccessToken,
prerequest: checkSumSupported ? injectTusChecksumHeader : () => {}
};
this.state$ = this.uploadxService.init(this.options);
this.state$.pipe(takeUntil(this.unsubscribe$)).subscribe(state => {
const target = this.uploads.find(item => item.uploadId === state.uploadId);
target ? Object.assign(target, state) : this.uploads.push(state);
});
},
e => {
console.error(e);
this.ngOnInit();
}
);
this.state$ = this.uploadxService.init(this.options);
this.state$.pipe(takeUntil(this.unsubscribe$)).subscribe(state => {
const target = this.uploads.find(item => item.uploadId === state.uploadId);
target ? Object.assign(target, state) : this.uploads.push(state);
});

this.uploadxService.ajax
.request({ method: 'OPTIONS', url: this.options.endpoint! })
.then(({ headers }) => {
if (hasher.isSupported && headers['tus-checksum-algorithm'].includes('sha1')) {
this.uploadxService.options.prerequest = injectTusChecksumHeader;
}
}, console.error);
}

ngOnDestroy(): void {
Expand Down
13 changes: 9 additions & 4 deletions src/uploadx/lib/uploader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -254,15 +254,20 @@ export abstract class Uploader implements UploadState {
return this.responseHeaders[key.toLowerCase()] || null;
}

protected getChunk(): { start: number; end: number; body: Blob } {
/**
 * Slices a chunk of the file for upload.
 *
 * Halves the adaptive `DynamicChunk` size after an HTTP 413 response and
 * refreshes `this.chunkSize` from the configured options before slicing.
 * @param offset - byte position to start from; defaults to the current upload offset
 * @param size - chunk length in bytes; defaults to the effective chunk size
 */
getChunk(offset?: number, size?: number): { start: number; end: number; body: Blob } {
  if (this.responseStatus === 413) {
    // Server rejected the payload as too large — shrink the adaptive chunk.
    DynamicChunk.maxSize = DynamicChunk.size = Math.floor(DynamicChunk.size / 2);
  }
  const configured = this.options.chunkSize;
  // chunkSize === 0 means "whole file"; undefined falls back to DynamicChunk.
  this.chunkSize = configured === 0 ? this.size : configured || DynamicChunk.size;
  const start = offset ?? this.offset ?? 0;
  const end = Math.min(start + (size || this.chunkSize), this.size);
  return { start, end, body: this.file.slice(start, end) };
}

Expand Down

0 comments on commit e1b952f

Please sign in to comment.