Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add bottleneck function #145

Draft
wants to merge 2 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions benchmarks/curry/bottleneck.bench.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import * as _ from 'radashi'
import { bench } from 'vitest'

describe('bottleneck', () => {
  // NOTE: `bottleneck` requires an options object with `interval` and a
  // function — calling `_.bottleneck()` with no arguments throws a
  // TypeError (destructuring `undefined`), so the previous bench only
  // measured throwing. Benchmark the real construction path instead.
  bench('create bottled function', () => {
    _.bottleneck({ interval: 1000 }, (x: number) => x)
  })
})

14 changes: 14 additions & 0 deletions docs/curry/bottleneck.mdx
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
---
title: bottleneck
description: Restrict a function's call frequency to a specified rate
---

### Usage

Limit how often a function may be called. At most `max` calls are allowed to
start within each `interval` (in milliseconds); calls beyond that limit are
queued and run once the interval elapses. The optional `concurrency` option
additionally caps how many calls may be executing at the same time. The
returned function resolves with the wrapped function's result, and its
`cancel` method drops any queued (not yet started) calls.

```ts
import * as _ from 'radashi'

const double = _.bottleneck(
  { max: 1, interval: 1000 },
  async (x: number) => x * 2,
)

double(1) // <- Runs immediately
double(2) // <- Waits 1 second
```
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
"@biomejs/biome": "^1.8.3",
"@vitest/coverage-v8": "2.0.5",
"cspell": "^8.13.3",
"flush-microtasks": "^1.0.1",
"prettier": "^3.3.2",
"prettier-plugin-pkg": "^0.18.1",
"prettier-plugin-sh": "^0.14.0",
Expand Down
149 changes: 149 additions & 0 deletions src/curry/bottleneck.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
declare const setTimeout: (callback: () => void, delay: number) => unknown

/**
* The options for the `bottleneck` function.
*
* @see https://radashi-org.github.io/reference/async/bottleneck
*/
export interface BottleneckOptions {
/**
* The maximum number of calls to allow per interval.
*
* @default 1
*/
max?: number
/**
* The interval at which to allow the maximum number of calls.
*/
interval: number
/**
* The maximum number of calls to allow at once.
*
* @default Infinity
*/
concurrency?: number
}

/**
* The return type of the `bottleneck` function.
*
* @see https://radashi-org.github.io/reference/async/bottleneck
*/
export interface BottledFunction<TArgs extends any[], TReturn> {
(...args: TArgs): Promise<TReturn>
/**
* Prevent any throttled calls from ever running.
*
* Currently executing calls are not affected.
*/
cancel(): void
}

/**
* Limit the rate at which a function is called.
*
* A maximum of `max` calls are allowed per `interval` milliseconds.
*
* Use the `concurrency` option for limiting the number of concurrent
* calls.
*
* @see https://radashi-org.github.io/reference/async/bottleneck
* @example
* ```ts
* const double = bottleneck(
* { max: 1, interval: 1000 },
* async (x: number) => x * 2
* )
* double(1) // <- Runs immediately
* double(2) // <- Will wait 1 second
* double(3) // <- Will wait 2 seconds
* ```
* @example Limited concurrency
* ```ts
* const double = bottleneck(
* { max: 5, interval: 1000, concurrency: 1 },
* async (x: number) => x * 2
* )
* double(1) // <- Runs immediately
* double(2) // <- Will wait for 1 to finish
* double(3) // <- Will wait for 2 to finish
* ```
*/
export function bottleneck<TArgs extends any[], TReturn>(
{
max = 1,
interval,
concurrency = Number.POSITIVE_INFINITY,
}: BottleneckOptions,
fn: (...args: TArgs) => TReturn,
): BottledFunction<TArgs, TReturn> {
let numCalls = 0
let numRunning = 0
let startTime: number | undefined

type QueueItem = {
args: TArgs
resolve: (value: TReturn | PromiseLike<TReturn>) => void
reject: (error: any) => void
}

const queue: QueueItem[] = []

async function run(input: TArgs | QueueItem) {
const now = Date.now()
startTime ??= now

if (now - startTime >= interval) {
startTime = now
numCalls = 0
}

if (numCalls < max && numRunning < concurrency) {
// If this is the first call, schedule the flush.
if (!numCalls && Number.isFinite(interval)) {
setTimeout(next, interval)
}

let result: any

numCalls++
numRunning++
try {
const args = Array.isArray(input) ? input : input.args
result = await fn(...args)
} catch (error) {
if (Array.isArray(input)) {
throw error
}
return input.reject(error)
} finally {
numRunning--
next()
}

return Array.isArray(input) ? result : input.resolve(result)
}

if (Array.isArray(input)) {
// Return a queue promise for the throttled call.
return new Promise<TReturn>((resolve, reject) => {
queue.push({ args: input, resolve, reject })
})
}

// Return the unused queue item to the queue.
queue.unshift(input)
}

// This function is called when the interval has elapsed and after
// every finished call.
const next = () => queue.length && run(queue.shift()!)

const bottled: BottledFunction<TArgs, any> = (...args) => run(args)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can create a QueueItem here, push it in the queue and call next(). That should help us to simplify run function to run(input: QueueItem).

Something like this:

const bottled: BottledFunction<TArgs, any> = (...args) => {
  const { promise, resolve, reject } = Promise.withResolvers();
  const item = { args, resolve, reject };
  
  queue.push(item);
  next();
  
  return promise;
}

Probably we need to create our internal withResolvers util since that API is very new.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Probably we need to create our internal withResolvers util since that API is very new.

Would you be interested in contributing that?

You can do this in terminal to create the files:

pnpm add-function async/withResolvers

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Agreed on the simplification, even if it means a little more GC work.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is a PR #148

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

BTW @aleclarson add-functiondoesn't put an export into src/mod.ts. (also, we can use https://github.com/google/zx instead of sh, but maybe it's overengineering)

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

add-functiondoesn't put an export into src/mod.ts

Yeah I hope to fix that eventually 😅

The add-function script should have warned you though. Did it?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hmm, no 🤔

$ pnpm add-function async/withResolvers

> radashi@12.1.0 add-function /home/vladimir/code/radashi
> bash ./scripts/add-function.sh "async/withResolvers"

Enter a description for withResolvers:
Ponyfill for Promise.withResolvers()


bottled.cancel = () => {
queue.length = 0
}

return bottled
}
1 change: 1 addition & 0 deletions src/mod.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ export * from './async/sleep.ts'
export * from './async/tryit.ts'
export * from './async/withResolvers.ts'

export * from './curry/bottleneck.ts'
export * from './curry/callable.ts'
export * from './curry/chain.ts'
export * from './curry/compose.ts'
Expand Down
156 changes: 156 additions & 0 deletions tests/curry/bottleneck.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import { flushMicroTasks } from 'flush-microtasks'
import * as _ from 'radashi'

// Rate-limit window used by every test, in milliseconds.
const interval = 1000
// A delta that is small relative to `interval`, used to step fake timers
// just before or just past a window boundary.
const smidge = 10

describe('bottleneck', () => {
  // Time is driven manually via vitest's fake timers in every test.
  beforeEach(() => {
    vi.useFakeTimers()
  })

  afterEach(() => {
    vi.useRealTimers()
  })

  test('limit calls within interval', async () => {
    const fn = vi.fn((x: number) => x)
    const limitedFn = _.bottleneck({ max: 2, interval }, fn)

    limitedFn(1)
    limitedFn(2)
    limitedFn(3)

    // Only the first `max` (2) calls run immediately; the third is queued.
    expect(fn).toHaveBeenCalledTimes(2)
    expect(fn).toHaveBeenNthCalledWith(1, 1)
    expect(fn).toHaveBeenNthCalledWith(2, 2)

    // Just short of the interval boundary, the queued call has not run.
    vi.advanceTimersByTime(interval - smidge)
    expect(fn).toHaveBeenCalledTimes(2)

    // Crossing the boundary flushes the queued call.
    vi.advanceTimersByTime(smidge)
    expect(fn).toHaveBeenCalledTimes(3)
    expect(fn).toHaveBeenNthCalledWith(3, 3)
  })

  test('respect concurrency limit', async () => {
    const fn = vi.fn(async (x: number) => {
      await _.sleep(smidge)
      return x
    })
    const limitedFn = _.bottleneck(
      { max: Number.POSITIVE_INFINITY, interval, concurrency: 1 },
      fn,
    )

    const promise1 = limitedFn(1)
    const promise2 = limitedFn(2)

    // With concurrency=1, the second call waits for the first to finish,
    // even though `max` would allow it to start.
    expect(fn).toHaveBeenCalledTimes(1)
    vi.advanceTimersByTime(smidge)
    await flushMicroTasks()

    // First call's sleep has elapsed, so the second call may now start.
    expect(fn).toHaveBeenCalledTimes(2)
    vi.advanceTimersByTime(interval)

    const [result1, result2] = await Promise.all([promise1, promise2])

    expect(result1).toBe(1)
    expect(result2).toBe(2)
  })

  test('queue calls beyond max limit', async () => {
    const fn = vi.fn((x: number) => x)
    const limitedFn = _.bottleneck({ max: 1, interval }, fn)

    const promise1 = limitedFn(1)
    const promise2 = limitedFn(2)

    // Only the first call runs right away; the second is queued.
    expect(fn).toHaveBeenCalledTimes(1)
    vi.advanceTimersByTime(smidge)

    // Still within the first window: the queued call has not run.
    expect(fn).toHaveBeenCalledTimes(1)
    vi.advanceTimersByTime(interval)

    // A full interval elapsed, so the queued call has now run.
    expect(fn).toHaveBeenCalledTimes(2)
    vi.advanceTimersByTime(interval)

    // A fresh window with an empty queue: new calls run immediately.
    expect(fn).toHaveBeenCalledTimes(2)
    limitedFn(3) // <- Should run immediately.
    expect(fn).toHaveBeenCalledTimes(3)

    const result1 = await promise1
    expect(result1).toBe(1)

    const result2 = await promise2
    expect(result2).toBe(2)
  })

  test('error thrown by first call does not affect queued calls', async () => {
    const fn = vi.fn(async (x: number) => {
      await _.sleep(smidge)
      if (x === 1) {
        throw new Error('test')
      }
      return x
    })
    const limitedFn = _.bottleneck({ max: 1, interval }, fn)

    const promise1 = limitedFn(1)
    const promise2 = limitedFn(2)

    // The immediate call rejects once its sleep elapses.
    vi.advanceTimersByTime(smidge)
    await expect(promise1).rejects.toThrow('test')

    expect(fn).toHaveBeenCalledTimes(1)

    // The queued call still runs in the next window despite the rejection.
    vi.advanceTimersByTime(interval)
    await flushMicroTasks()

    expect(fn).toHaveBeenCalledTimes(2)

    await expect(promise2).resolves.toBe(2)
  })

  test('error thrown by queued call does not affect other queued calls', async () => {
    const fn = vi.fn(async (x: number) => {
      await _.sleep(smidge)
      if (x === 2) {
        throw new Error('test')
      }
      return x
    })
    const limitedFn = _.bottleneck({ max: 1, interval }, fn)

    const promise1 = limitedFn(1)
    const promise2 = limitedFn(2)
    const promise3 = limitedFn(3)

    vi.advanceTimersByTime(smidge)
    await expect(promise1).resolves.toBe(1)

    // The second (queued) call rejects after its window opens...
    vi.advanceTimersByTime(interval)
    await expect(promise2).rejects.toThrow('test')

    // ...and the third still resolves in the window after that.
    vi.advanceTimersByTime(interval)
    await expect(promise3).resolves.toBe(3)
  })

  describe('cancel method', () => {
    test('cancels all queued calls', async () => {
      const fn = vi.fn((x: number) => x)
      const limitedFn = _.bottleneck({ max: 1, interval }, fn)

      limitedFn(1)
      limitedFn(2)

      // Only the first call ran; the second is queued, then cancelled.
      expect(fn).toHaveBeenCalledTimes(1)
      limitedFn.cancel()

      // Even after the interval elapses, the cancelled call never runs.
      vi.advanceTimersByTime(interval)
      await flushMicroTasks()

      expect(fn).toHaveBeenCalledTimes(1)
    })
  })
})
Loading