Skip to content

Commit

Permalink
[RFC] Batch loads distinct keys with disabled cache.
Browse files Browse the repository at this point in the history
Based on conversation from #44
  • Loading branch information
leebyron committed Sep 24, 2016
1 parent f119438 commit 67be4f3
Show file tree
Hide file tree
Showing 4 changed files with 44 additions and 16 deletions.
5 changes: 4 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,10 @@ Create a new `DataLoader` given a batch loading function and options.
passed in to the `batchLoadFn`.

- *cache*: Default `true`. Set to `false` to disable caching, instead
creating a new Promise and new key in the `batchLoadFn` for every load.
creating a new Promise and another call to `batchLoadFn` when a key which
was requested in the past is requested again. However, if the same key is
requested multiple times in the same tick of the run-loop, it will still
only appear once in the `keys` provided to `batchLoadFn`.

- *cacheKeyFn*: A function to produce a cache key for a given load key.
Defaults to `key => key`. Useful to provide when JavaScript objects are keys
Expand Down
18 changes: 18 additions & 0 deletions src/__tests__/dataloader-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -509,6 +509,24 @@ describe('Accepts options', () => {
);
});

// With the cache disabled, keys requested in the same tick must still be
// deduplicated within that single dispatch: each of 'A', 'C', 'D', 'B'
// appears exactly once in the one batchLoadFn call, even though 'A', 'C'
// and 'D' are each requested more than once.
it('Still batches distinct keys when cache disabled', async () => {
  var [ identityLoader, loadCalls ] = idLoader({ cache: false });

  var results = await Promise.all([
    identityLoader.load('A'),
    identityLoader.load('C'),
    identityLoader.load('D'),
    identityLoader.loadMany([ 'C', 'D', 'A', 'A', 'B' ]),
  ]);

  expect(results[0]).to.equal('A');
  expect(results[1]).to.equal('C');
  expect(results[2]).to.equal('D');
  expect(results[3]).to.deep.equal([ 'C', 'D', 'A', 'A', 'B' ]);

  // Exactly one dispatch occurred, containing only the distinct keys
  // in first-requested order.
  expect(loadCalls).to.deep.equal([ [ 'A', 'C', 'D', 'B' ] ]);
});

describe('Accepts object key in custom cacheKey function', () => {
function cacheKey(key) {
return Object.keys(key).sort().map(k => k + ':' + key[k]).join();
Expand Down
8 changes: 5 additions & 3 deletions src/index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,11 @@ type Options<K, V> = {
maxBatchSize?: number;

/**
* Default `true`. Set to `false` to disable caching,
* instead creating a new Promise and new key in
* the `batchLoadFn` for every load.
* Default `true`. Set to `false` to disable caching, instead
* creating a new Promise and another call to `batchLoadFn` when a key which
* was requested in the past is requested again. However, if the same key is
* requested multiple times in the same tick of the run-loop, it will still
* only appear once in the `keys` provided to `batchLoadFn`.
*/
cache?: boolean,

Expand Down
29 changes: 17 additions & 12 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -78,16 +78,13 @@ export default class DataLoader<K, V> {
// Determine options
var options = this._options;
var shouldBatch = !options || options.batch !== false;
var shouldCache = !options || options.cache !== false;
var cacheKeyFn = options && options.cacheKeyFn;
var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;

// If caching and there is a cache-hit, return cached Promise.
if (shouldCache) {
var cachedPromise = this._promiseCache.get(cacheKey);
if (cachedPromise) {
return cachedPromise;
}
// If this key was already requested, return the existing Promise.
var cachedPromise = this._promiseCache.get(cacheKey);
if (cachedPromise) {
return cachedPromise;
}

// Otherwise, produce a new Promise for this value.
Expand All @@ -109,10 +106,8 @@ export default class DataLoader<K, V> {
}
});

// If caching, cache this promise.
if (shouldCache) {
this._promiseCache.set(cacheKey, promise);
}
// Cache this promise.
this._promiseCache.set(cacheKey, promise);

return promise;
}
Expand Down Expand Up @@ -222,9 +217,19 @@ function dispatchQueue<K, V>(loader: DataLoader<K, V>) {
var queue = loader._queue;
loader._queue = [];

// DataLoader always maintains a cache in order to only request distinct keys
// within a given dispatch. However if caching was disabled, then this cache
// will be cleared as soon as the dispatch has begun, restricting the duration
// of the cache to a single tick of the run loop.
var options = loader._options;
var shouldCache = !options || options.cache !== false;
if (!shouldCache) {
loader._promiseCache.clear();
}

// If a maxBatchSize was provided and the queue is longer, then segment the
// queue into multiple batches, otherwise treat the queue as a single batch.
var maxBatchSize = loader._options && loader._options.maxBatchSize;
var maxBatchSize = options && options.maxBatchSize;
if (maxBatchSize && maxBatchSize > 0 && maxBatchSize < queue.length) {
for (var i = 0; i < queue.length / maxBatchSize; i++) {
dispatchQueueBatch(
Expand Down

0 comments on commit 67be4f3

Please sign in to comment.