Commit
wip: rewrite in typescript
hacdias committed Jan 20, 2023
1 parent b8bdcc4 commit 4ab49c4
Showing 178 changed files with 3,220 additions and 2,110 deletions.
4 changes: 2 additions & 2 deletions package.json
@@ -43,14 +43,14 @@
   },
   "files": [
     "src",
-    "dist",
+    "dist/src",
     "!dist/test",
     "!**/*.tsbuildinfo"
   ],
   "exports": {
     ".": {
       "types": "./dist/src/index.d.ts",
-      "import": "./src/index.js",
+      "import": "./dist/src/index.js",
       "require": "./dist/src/index.js"
     }
   },
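The `exports` map now routes both module systems to the compiled output, and `files` ships `dist/src` while excluding tests and build metadata. A minimal consumer-side sketch (the package name is hypothetical, since this page doesn't show it):

// ESM consumers: the "import" condition resolves to ./dist/src/index.js,
// with type declarations picked up from ./dist/src/index.d.ts
import { createAdd, createAddAll } from 'example-rpc-client' // hypothetical package name

// CJS consumers: the "require" condition resolves to the same compiled file:
// const { createAdd } = require('example-rpc-client')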
125 changes: 0 additions & 125 deletions src/add-all.js

This file was deleted.

208 changes: 208 additions & 0 deletions src/add-all.ts
@@ -0,0 +1,208 @@
import { CID } from 'multiformats/cid'
import { objectToCamel } from './lib/object-to-camel.js'
import { configure } from './lib/configure.js'
import { multipartRequest } from 'ipfs-core-utils/multipart-request'
import { toUrlSearchParams } from './lib/to-url-search-params.js'
import { abortSignal } from './lib/abort-signal.js'
import type { ClientOptions, IPFSCoreAddProgressFn, IPFSUtilsHttpUploadProgressFn } from './types.js'
import type { Version as CIDVersion } from 'multiformats/cid'
import type { Mtime } from 'ipfs-unixfs'
import type { ImportCandidateStream } from 'ipfs-core-types/src/utils.js'

export interface AddProgressFn { (bytes: number, path?: string): void }

export interface AddOptions extends ClientOptions {
/**
   * Chunking algorithm used to build ipfs DAGs. (Defaults to 'size-262144')
*/
chunker?: string
/**
* The CID version to use when storing the data
*/
cidVersion?: CIDVersion

/**
* Multihash hashing algorithm to use. (Defaults to 'sha2-256')
*/
hashAlg?: string

/**
* If true, will not add blocks to the blockstore. (Defaults to `false`)
*/
onlyHash?: boolean

/**
* Pin this object when adding. (Defaults to `true`)
*/
pin?: boolean

/**
   * A function that will be called with the number of bytes added as a file
   * is added to ipfs, along with the path of the file being added.
*
   * **Note**: it will not be called for directory entries.
*/
progress?: AddProgressFn

/**
* If true, DAG leaves will contain raw file data and not be wrapped in a
* protobuf. (Defaults to `false`)
*/
rawLeaves?: boolean

/**
   * If true, will use the
   * [trickle DAG](https://godoc.org/github.com/ipsn/go-ipfs/gxlibs/github.com/ipfs/go-unixfs/importer/trickle)
   * format for DAG generation. (Defaults to `false`)
*/
trickle?: boolean

/**
* Adds a wrapping node around the content. (Defaults to `false`)
*/
wrapWithDirectory?: boolean

/**
* Whether to preload all blocks created during this operation
*/
preload?: boolean

/**
* How many blocks from a file to write concurrently
*/
blockWriteConcurrency?: number
}

export interface AddAllOptions extends AddOptions {

/**
   * Allows creating directories with an unlimited number of entries; currently
   * the size of unixfs directories is limited by the maximum block size.
   *
   * **Note**: this is an experimental feature. (Defaults to `false`)
*/
enableShardingExperiment?: boolean

/**
   * Directories with more than this number of files will be created as
   * HAMT-sharded directories. (Defaults to 1000)
*/
shardSplitThreshold?: number

/**
* How many files to write concurrently
*/
fileImportConcurrency?: number
}

export interface AddResult {
cid: CID
size: number
path: string
mode?: number
mtime?: Mtime
}

export const createAddAll = configure((api) => {
/**
* Import multiple files and data into IPFS
*/
async function * addAll (source: ImportCandidateStream, options?: AddAllOptions): AsyncIterable<AddResult> {
// allow aborting requests on body errors
const controller = new AbortController()
const signal = abortSignal(controller.signal, options?.signal)
const { headers, body, total, parts } =
await multipartRequest(source, controller, options?.headers)

    // In the browser the response body only starts streaming once the upload
    // is complete, at which point all the progress updates are invalid. If the
    // length of the content is computable, we can instead derive progress from
    // the `{ total, loaded }` passed to `onUploadProgress` together with
    // `multipart.total`, in which case we disable the progress updates written
    // out by the server.
const [progressFn, onUploadProgress] = typeof options?.progress === 'function'
? createProgressHandler(total, parts, options.progress)
: [undefined, undefined]

const res = await api.post('add', {
searchParams: toUrlSearchParams({
'stream-channels': true,
...options,
progress: Boolean(progressFn)
}),
onUploadProgress,
signal,
headers,
body
})

for await (let file of res.ndjson()) {
file = objectToCamel(file)

if (file.hash !== undefined) {
yield toCoreInterface(file)
} else if (progressFn != null) {
progressFn(file.bytes ?? 0, file.name)
}
}
}
return addAll
})

interface ProgressPart {
name: string
start: number
end: number
}

/**
 * Returns a simple progress callback when the content length isn't computable,
 * or a progress event handler that derives progress from upload progress
 * events when it is.
*/
const createProgressHandler = (total: number, parts: ProgressPart[]|null, progress: IPFSCoreAddProgressFn): [IPFSCoreAddProgressFn|undefined, IPFSUtilsHttpUploadProgressFn|undefined] =>
parts != null ? [undefined, createOnUploadProgress(total, parts, progress)] : [progress, undefined]

/**
* Creates a progress handler that interpolates progress from upload progress
* events and total size of the content that is added.
*/
const createOnUploadProgress = (size: number, parts: ProgressPart[], progress: IPFSCoreAddProgressFn): IPFSUtilsHttpUploadProgressFn => {
let index = 0
const count = parts.length
return ({ loaded, total }) => {
// Derive position from the current progress.
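    // e.g. with size = 1000 and parts [{ start: 0, end: 400 }, { start: 400, end: 1000 }],
    // loaded / total = 0.5 gives position 500, i.e. 100 bytes into the second part.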
const position = Math.floor(loaded / total * size)
while (index < count) {
const { start, end, name } = parts[index]
      // If within the current part's range, report progress and break the loop.
if (position < end) {
progress(position - start, name)
break
      // If past the current part's range, report the final byte for the chunk
      // and move on to the next one.
} else {
progress(end - start, name)
index += 1
}
}
}
}

function toCoreInterface ({ name, hash, size, mode, mtime, mtimeNsecs }: any) {
const output: AddResult = {
path: name,
cid: CID.parse(hash),
size: parseInt(size)
}

if (mode != null) {
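    // mode comes back from the API as an octal string, hence the base-8 parse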
output.mode = parseInt(mode, 8)
}

if (mtime != null) {
output.mtime = {
secs: mtime,
nsecs: mtimeNsecs ?? 0
}
}

return output
}
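For orientation, a hedged usage sketch of the addAll API defined above. The client option shown (`url`) is an assumption for illustration; the diff only shows that `createAddAll` is built with `configure` and yields `AddResult` objects:

// a minimal sketch, assuming an IPFS HTTP API on the default endpoint
const addAll = createAddAll({ url: 'http://127.0.0.1:5001/api/v0' })

const candidates = [
  { path: 'hello.txt', content: new TextEncoder().encode('hello world') }
]

for await (const { path, cid, size } of addAll(candidates, {
  pin: true,
  rawLeaves: true,
  // receives bytes sent so far and the file's path (never directory entries)
  progress: (bytes, path) => console.log(`${path ?? ''}: ${bytes} bytes`)
})) {
  console.log(path, cid.toString(), size)
}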
15 changes: 7 additions & 8 deletions src/add.js → src/add.ts
@@ -1,20 +1,19 @@
-import { createAddAll } from './add-all.js'
+import { AddOptions, createAddAll } from './add-all.js'
 import last from 'it-last'
 import { configure } from './lib/configure.js'
 import { normaliseInput } from 'ipfs-core-utils/files/normalise-input-single'
+import type { ImportCandidate } from 'ipfs-core-types/src/utils.js'
+import type { AddResult, Options } from './types.js'
 
-/**
- * @param {import('./types').Options} options
- */
-export function createAdd (options) {
+export function createAdd (options: Options) {
   const all = createAddAll(options)
   return configure(() => {
     /**
-     * @type {import('./types').RootAPI["add"]}
+     * Import a file or data into IPFS
      */
-    async function add (input, options = {}) {
+    async function add (input: ImportCandidate, options?: AddOptions): Promise<AddResult> {
       const source = normaliseInput(input)
-      // @ts-expect-error - all may return undefined if source is empty
+      // // @ts-expect-error - all may return undefined if source is empty
       const addAllPromise = all(source, options)
       // @ts-expect-error - last may return undefined if source is empty
       return await last(addAllPromise)
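The single-file `add` wraps `addAll` via `it-last`; a sketch under the same assumed client options (the remainder of the file, which applies the options and returns `add`, is not shown in this hunk):

const add = createAdd({ url: 'http://127.0.0.1:5001/api/v0' }) // hypothetical options
const { cid, path, size } = await add('hello world', { pin: true })
console.log(cid.toString(), path, size)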
17 changes: 0 additions & 17 deletions src/bitswap/index.js

This file was deleted.

