Allow making context specific requests using the data module (#32961)
Co-authored-by: Nik Tsekouras <ntsekouras@outlook.com>
youknowriad and ntsekouras committed Jun 25, 2021
1 parent 74a8873 commit 9e23649
Showing 11 changed files with 360 additions and 188 deletions.
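
Illustrative usage (not part of the diff): with this change, entity records requested with different context values are resolved and cached separately instead of overwriting one another. The store name and selector below are the standard core-data ones; the post type and contexts are just examples.

import { select } from '@wordpress/data';

// Resolver-backed selector call; kicks off a REST request with ?context=view
// (rendered fields) and stores the results under the 'view' context.
select( 'core' ).getEntityRecords( 'postType', 'post', { context: 'view' } );

// A second call for the same records with ?context=edit (raw, editable
// fields); its results are now kept under the 'edit' context rather than
// replacing the 'view' ones.
select( 'core' ).getEntityRecords( 'postType', 'post', { context: 'edit' } );
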
5 changes: 5 additions & 0 deletions packages/core-data/src/queried-data/get-query-parts.js
@@ -41,6 +41,7 @@ export function getQueryParts( query ) {
perPage: 10,
fields: null,
include: null,
context: 'default',
};

// Ensure stable key by sorting keys. Also more efficient for iterating.
@@ -65,6 +66,10 @@ export function getQueryParts( query ) {
);
break;

case 'context':
parts.context = value;
break;

default:
// While in theory, we could exclude "_fields" from the stableKey
// because two request with different fields have the same results
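
A quick sketch of the resulting getQueryParts behaviour, mirroring the unit tests added at the end of this commit; the example query and its encoded stableKey are assumptions. The context argument becomes a dedicated query part and is kept out of the stableKey.

import getQueryParts from './get-query-parts';

const parts = getQueryParts( { search: 'music', context: 'edit' } );
// parts.context   === 'edit'
// parts.stableKey === 'search=music'; context is not folded into the key,
// so two queries that differ only by context share the same stableKey.
// parts.page === 1, parts.perPage === 10, parts.fields === null, parts.include === null
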
116 changes: 74 additions & 42 deletions packages/core-data/src/queried-data/reducer.js
@@ -1,7 +1,7 @@
/**
* External dependencies
*/
-import { map, flowRight, omit, forEach, filter } from 'lodash';
+import { map, flowRight, omit, filter, mapValues } from 'lodash';

/**
* WordPress dependencies
@@ -20,6 +20,16 @@ import {
import { DEFAULT_ENTITY_KEY } from '../entities';
import getQueryParts from './get-query-parts';

function getContextFromAction( action ) {
const { query } = action;
if ( ! query ) {
return 'default';
}

const queryParts = getQueryParts( query );
return queryParts.context;
}

/**
* Returns a merged array of item IDs, given details of the received paginated
* items. The array is sparse-like with `undefined` entries where holes exist.
@@ -71,24 +81,30 @@ export function getMergedItemIds( itemIds, nextItemIds, page, perPage ) {
*
* @return {Object} Next state.
*/
-function items( state = {}, action ) {
+export function items( state = {}, action ) {
switch ( action.type ) {
-		case 'RECEIVE_ITEMS':
+		case 'RECEIVE_ITEMS': {
+			const context = getContextFromAction( action );
			const key = action.key || DEFAULT_ENTITY_KEY;
			return {
				...state,
-				...action.items.reduce( ( accumulator, value ) => {
-					const itemId = value[ key ];
-					accumulator[ itemId ] = conservativeMapItem(
-						state[ itemId ],
-						value
-					);
-					return accumulator;
-				}, {} ),
+				[ context ]: {
+					...state[ context ],
+					...action.items.reduce( ( accumulator, value ) => {
+						const itemId = value[ key ];
+						accumulator[ itemId ] = conservativeMapItem(
+							state?.[ context ]?.[ itemId ],
+							value
+						);
+						return accumulator;
+					}, {} ),
+				},
			};
+		}
		case 'REMOVE_ITEMS':
-			const newState = omit( state, action.itemIds );
-			return newState;
+			return mapValues( state, ( contextState ) =>
+				omit( contextState, action.itemIds )
+			);
}
return state;
}
@@ -106,32 +122,45 @@ function items( state = {}, action ) {
* @return {Object<string,boolean>} Next state.
*/
export function itemIsComplete( state = {}, action ) {
-	const { type, query, key = DEFAULT_ENTITY_KEY } = action;
-	if ( type !== 'RECEIVE_ITEMS' ) {
-		return state;
+	switch ( action.type ) {
+		case 'RECEIVE_ITEMS': {
+			const context = getContextFromAction( action );
+			const { query, key = DEFAULT_ENTITY_KEY } = action;
+
+			// An item is considered complete if it is received without an associated
+			// fields query. Ideally, this would be implemented in such a way where the
+			// complete aggregate of all fields would satisfy completeness. Since the
+			// fields are not consistent across all entity types, this would require
+			// introspection on the REST schema for each entity to know which fields
+			// compose a complete item for that entity.
+			const queryParts = query ? getQueryParts( query ) : {};
+			const isCompleteQuery =
+				! query || ! Array.isArray( queryParts.fields );
+
+			return {
+				...state,
+				[ context ]: {
+					...state[ context ],
+					...action.items.reduce( ( result, item ) => {
+						const itemId = item[ key ];
+
+						// Defer to completeness if already assigned. Technically the
+						// data may be outdated if receiving items for a field subset.
+						result[ itemId ] =
+							state?.[ context ]?.[ itemId ] || isCompleteQuery;
+
+						return result;
+					}, {} ),
+				},
+			};
+		}
+		case 'REMOVE_ITEMS':
+			return mapValues( state, ( contextState ) =>
+				omit( contextState, action.itemIds )
+			);
	}

-	// An item is considered complete if it is received without an associated
-	// fields query. Ideally, this would be implemented in such a way where the
-	// complete aggregate of all fields would satisfy completeness. Since the
-	// fields are not consistent across all entity types, this would require
-	// introspection on the REST schema for each entity to know which fields
-	// compose a complete item for that entity.
-	const isCompleteQuery =
-		! query || ! Array.isArray( getQueryParts( query ).fields );
-
-	return {
-		...state,
-		...action.items.reduce( ( result, item ) => {
-			const itemId = item[ key ];
-
-			// Defer to completeness if already assigned. Technically the
-			// data may be outdated if receiving items for a field subset.
-			result[ itemId ] = state[ itemId ] || isCompleteQuery;
-
-			return result;
-		}, {} ),
-	};
+	return state;
}

/**
@@ -163,6 +192,8 @@ const receiveQueries = flowRight( [
return action;
} ),

onSubKey( 'context' ),

// Queries shape is shared, but keyed by query `stableKey` part. Original
// reducer tracks only a single query object.
onSubKey( 'stableKey' ),
@@ -194,17 +225,18 @@ const queries = ( state = {}, action ) => {
case 'RECEIVE_ITEMS':
return receiveQueries( state, action );
case 'REMOVE_ITEMS':
-			const newState = { ...state };
			const removedItems = action.itemIds.reduce( ( result, itemId ) => {
				result[ itemId ] = true;
				return result;
			}, {} );
-			forEach( newState, ( queryItems, key ) => {
-				newState[ key ] = filter( queryItems, ( queryId ) => {
-					return ! removedItems[ queryId ];
+
+			return mapValues( state, ( contextQueries ) => {
+				return mapValues( contextQueries, ( queryItems ) => {
+					return filter( queryItems, ( queryId ) => {
+						return ! removedItems[ queryId ];
+					} );
				} );
			} );
-			return newState;
default:
return state;
}
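
For orientation, a rough sketch (not taken from the diff) of the queried-data state shape these reducers now produce: items, itemIsComplete and queries are each keyed by context first, then by item ID or query stableKey. The IDs and records below are hypothetical.

const queriedDataState = {
	items: {
		default: { 10: { id: 10, title: { rendered: 'Hello' } } },
		edit: { 10: { id: 10, title: { raw: 'Hello', rendered: 'Hello' } } },
	},
	itemIsComplete: {
		default: { 10: true },
		edit: { 10: true },
	},
	queries: {
		default: { '': [ 10 ] },
		edit: { '': [ 10 ] },
	},
};

// This nesting is also why REMOVE_ITEMS switches from a flat
// omit( state, action.itemIds ) to
// mapValues( state, ( contextState ) => omit( contextState, action.itemIds ) ):
// a deleted record has to be dropped from every context it was received in.
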
22 changes: 14 additions & 8 deletions packages/core-data/src/queried-data/selectors.js
@@ -28,10 +28,16 @@ const queriedItemsCacheByState = new WeakMap();
* @return {?Array} Query items.
*/
function getQueriedItemsUncached( state, query ) {
-	const { stableKey, page, perPage, include, fields } = getQueryParts(
-		query
-	);
+	const {
+		stableKey,
+		page,
+		perPage,
+		include,
+		fields,
+		context,
+	} = getQueryParts( query );
let itemIds;

if ( Array.isArray( include ) && ! stableKey ) {
// If the parsed query yields a set of IDs, but otherwise no filtering,
// it's safe to consider targeted item IDs as the include set. This
@@ -40,8 +46,8 @@ function getQueriedItemsUncached( state, query ) {
itemIds = include;
// TODO: Avoid storing the empty stable string in reducer, since it
// can be computed dynamically here always.
-	} else if ( state.queries[ stableKey ] ) {
-		itemIds = state.queries[ stableKey ];
+	} else if ( state.queries?.[ context ]?.[ stableKey ] ) {
+		itemIds = state.queries[ context ][ stableKey ];
}

if ( ! itemIds ) {
@@ -61,11 +67,11 @@ function getQueriedItemsUncached( state, query ) {
continue;
}

-		if ( ! state.items.hasOwnProperty( itemId ) ) {
+		if ( ! state.items[ context ]?.hasOwnProperty( itemId ) ) {
			return null;
		}

-		const item = state.items[ itemId ];
+		const item = state.items[ context ][ itemId ];

let filteredItem;
if ( Array.isArray( fields ) ) {
@@ -79,7 +85,7 @@ function getQueriedItemsUncached( state, query ) {
} else {
// If expecting a complete item, validate that completeness, or
// otherwise abort.
-			if ( ! state.itemIsComplete[ itemId ] ) {
+			if ( ! state.itemIsComplete[ context ]?.[ itemId ] ) {
return null;
}

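
A condensed sketch of the lookup order the updated selector now follows; the function and state keys match the diff, while the state literal itself is hypothetical.

import getQueryParts from './get-query-parts';

const state = {
	items: { view: { 7: { id: 7, title: 'Sample' } } },
	itemIsComplete: { view: { 7: true } },
	queries: { view: { '': [ 7 ] } },
};

const { stableKey, context } = getQueryParts( { context: 'view' } ); // stableKey: '', context: 'view'
const itemIds = state.queries?.[ context ]?.[ stableKey ]; // [ 7 ]
const item = state.items[ context ][ itemIds[ 0 ] ]; // { id: 7, title: 'Sample' }
const isComplete = state.itemIsComplete[ context ]?.[ itemIds[ 0 ] ]; // true
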
20 changes: 20 additions & 0 deletions packages/core-data/src/queried-data/test/get-query-parts.js
@@ -8,6 +8,7 @@ describe( 'getQueryParts', () => {
const parts = getQueryParts( { page: 2, per_page: 2 } );

expect( parts ).toEqual( {
context: 'default',
page: 2,
perPage: 2,
stableKey: '',
@@ -20,6 +21,7 @@ describe( 'getQueryParts', () => {
const parts = getQueryParts( { include: [ 1 ] } );

expect( parts ).toEqual( {
context: 'default',
page: 1,
perPage: 10,
stableKey: '',
@@ -34,6 +36,7 @@

expect( first ).toEqual( second );
expect( first ).toEqual( {
context: 'default',
page: 1,
perPage: 10,
stableKey: '%3F=%26&b=2',
@@ -46,6 +49,7 @@
const parts = getQueryParts( { a: [ 1, 2 ] } );

expect( parts ).toEqual( {
context: 'default',
page: 1,
perPage: 10,
stableKey: 'a%5B0%5D=1&a%5B1%5D=2',
@@ -60,6 +64,7 @@

expect( first ).toEqual( second );
expect( first ).toEqual( {
context: 'default',
page: 1,
perPage: 10,
stableKey: 'b=2',
@@ -72,6 +77,7 @@
const parts = getQueryParts( { b: 2, page: 1, per_page: -1 } );

expect( parts ).toEqual( {
context: 'default',
page: 1,
perPage: -1,
stableKey: 'b=2',
@@ -84,11 +90,25 @@
const parts = getQueryParts( { _fields: [ 'id', 'title' ] } );

expect( parts ).toEqual( {
context: 'default',
page: 1,
perPage: 10,
stableKey: '_fields=id%2Ctitle',
fields: [ 'id', 'title' ],
include: null,
} );
} );

it( 'returns the context as a dedicated query part', () => {
const parts = getQueryParts( { context: 'view' } );

expect( parts ).toEqual( {
page: 1,
perPage: 10,
stableKey: '',
include: null,
fields: null,
context: 'view',
} );
} );
} );