diff --git a/CHANGELOG.md b/CHANGELOG.md
index 818e4a88eb8..fc24531ecb1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -24,6 +24,9 @@
- Report single `MissingFieldError` instead of a potentially very large `MissingFieldError[]` array for incomplete cache reads, improving performance and memory usage.
[@benjamn](https://github.com/benjamn) in [#8734](https://github.com/apollographql/apollo-client/pull/8734)
+- When writing results into `InMemoryCache`, each written object is now identified using `policies.identify` _after_ traversing the fields of the object (rather than before), simplifying identification and reducing duplicate work. If you have custom `keyFields` functions, they still receive the raw result object as their first parameter, but the `KeyFieldsContext` parameter now provides `context.storeObject` (the `StoreObject` just processed by `processSelectionSet`) and `context.readField` (a helper function for reading fields from `context.storeObject` and any `Reference`s it might contain, similar to `readField` for `read`, `merge`, and `cache.modify` functions).
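+ As a sketch of the new API (the `Book` type and `isbn` field here are purely illustrative), a custom `keyFields` function can now read de-aliased fields through the context instead of traversing the raw result itself:
+ ```ts
+ const cache = new InMemoryCache({
+   typePolicies: {
+     Book: {
+       keyFields(book, context) {
+         // context.readField reads from context.storeObject, seeing through
+         // aliases and any References it might contain.
+         const isbn = context.readField("isbn");
+         return `Book:${JSON.stringify({ isbn })}`;
+       },
+     },
+   },
+ });
+ ```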
+ [@benjamn](https://github.com/benjamn) in [#8996](https://github.com/apollographql/apollo-client/pull/8996)
+
- Ensure `cache.identify` never throws when primary key fields are missing, and include the source object in the error message when `keyFields` processing fails.
[@benjamn](https://github.com/benjamn) in [#8679](https://github.com/apollographql/apollo-client/pull/8679)
diff --git a/package.json b/package.json
index 50892156c60..a9f3b76ef65 100644
--- a/package.json
+++ b/package.json
@@ -55,7 +55,7 @@
{
"name": "apollo-client",
"path": "./dist/apollo-client.min.cjs",
- "maxSize": "28.25 kB"
+ "maxSize": "28.45 kB"
}
],
"engines": {
diff --git a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap
index 3d73862a87d..3ed68fa48cf 100644
--- a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap
+++ b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap
@@ -65,6 +65,17 @@ exports[`type policies complains about missing key fields 1`] = `
\\"name\\": \\"James Gleick\\"
}
}
+}",
+ ],
+ Array [
+ "Missing field 'year' while writing result {
+ \\"__typename\\": \\"Book\\",
+ \\"isbn\\": \\"1400096235\\",
+ \\"title\\": \\"The Information\\",
+ \\"subtitle\\": \\"A History, a Theory, a Flood\\",
+ \\"author\\": {
+ \\"name\\": \\"James Gleick\\"
+ }
}",
],
],
@@ -73,6 +84,10 @@ exports[`type policies complains about missing key fields 1`] = `
"type": "return",
"value": undefined,
},
+ Object {
+ "type": "return",
+ "value": undefined,
+ },
],
}
`;
diff --git a/src/cache/inmemory/__tests__/key-extractor.ts b/src/cache/inmemory/__tests__/key-extractor.ts
new file mode 100644
index 00000000000..729412a9daa
--- /dev/null
+++ b/src/cache/inmemory/__tests__/key-extractor.ts
@@ -0,0 +1,182 @@
+import { KeySpecifier } from "../policies";
+import { canonicalStringify } from "../object-canon";
+import {
+ getSpecifierPaths,
+ collectSpecifierPaths,
+ extractKeyPath,
+} from "../key-extractor";
+
+describe("keyFields and keyArgs extraction", () => {
+ it("getSpecifierPaths should work for various specifiers", () => {
+ function check(specifier: KeySpecifier, expected: string[][]) {
+ const actualPaths = getSpecifierPaths(specifier);
+ expect(actualPaths).toEqual(expected);
+ // Make sure paths lookup is cached.
+ expect(getSpecifierPaths(specifier)).toBe(actualPaths);
+ }
+
+ check([], []);
+ check(["a"], [["a"]]);
+
+ check(["a", "b", "c"], [
+ ["a"],
+ ["b"],
+ ["c"]
+ ]);
+
+ check(["a", ["b", "c"], "d"], [
+ ["a", "b"],
+ ["a", "c"],
+ ["d"],
+ ]);
+
+ check(["a", "b", ["c"], "d"], [
+ ["a"],
+ ["b", "c"],
+ ["d"],
+ ]);
+
+ check(["a", "b", ["c", ["d", ["e", "f"], "g"]]], [
+ ["a"],
+ ["b", "c", "d", "e"],
+ ["b", "c", "d", "f"],
+ ["b", "c", "g"],
+ ]);
+ });
+
+ it("collectSpecifierPaths should work for various specifiers", () => {
+ const object = {
+ a: 123,
+ b: {
+ d: {
+ f: 567,
+ e: 456,
+ },
+ c: 234,
+ g: 678,
+ },
+ h: 789,
+ };
+
+ function collect(specifier: KeySpecifier) {
+ return collectSpecifierPaths(
+ specifier,
+ path => extractKeyPath(object, path)
+ );
+ }
+
+ function check(
+ specifier: KeySpecifier,
+ expected: Record<string, any>,
+ ) {
+ const actual = collect(specifier);
+ expect(actual).toEqual(expected);
+ // Not only must actual and expected be equal, but their key orderings
+ // must also be the same.
+ expect(JSON.stringify(actual)).toBe(JSON.stringify(expected));
+ }
+
+ check([], {});
+
+ check(["a", "h"], {
+ a: 123,
+ h: 789,
+ });
+
+ check(["h", "a", "bogus"], {
+ h: 789,
+ a: 123,
+ });
+
+ check(["b", ["d", ["e"]]], {
+ b: { d: { e: 456 }}
+ });
+
+ check(["b", ["d", ["e"]], "a"], {
+ b: { d: { e: 456 }},
+ a: 123,
+ });
+
+ check(["b", ["g", "d"], "a"], {
+ b: {
+ g: 678,
+ d: {
+ e: 456,
+ f: 567,
+ },
+ },
+ a: 123,
+ });
+
+ check(["b", "a"], {
+ b: {
+ // Notice that the keys of each nested object are sorted, despite being
+ // out of order in the original object.
+ c: 234,
+ d: {
+ e: 456,
+ f: 567,
+ },
+ g: 678,
+ },
+ // This a key nevertheless comes after the b key, because we requested
+ // that ordering with the ["b", "a"] specifier array.
+ a: 123,
+ });
+ });
+
+ it("extractKeyPath can handle arrays", () => {
+ const object = {
+ extra: "read about it elsewhere",
+ array: [
+ { value: 1, a: "should be first" },
+ { value: 2, x: "should come after value" },
+ { z: "should come last", value: 3 },
+ ],
+ more: "another word for extra",
+ };
+
+ expect(
+ extractKeyPath(object, ["array", "value"]),
+ ).toEqual([1, 2, 3]);
+
+ expect(collectSpecifierPaths(
+ ["array", ["value", "a", "x", "z"]],
+ path => {
+ expect(path.length).toBe(2);
+ expect(path[0]).toBe("array");
+ expect(["value", "a", "x", "z"]).toContain(path[1]);
+ return extractKeyPath(object, path);
+ },
+ )).toEqual({
+ array: {
+ value: object.array.map(item => item.value),
+ a: ["should be first", void 0, void 0],
+ x: [void 0, "should come after value", void 0],
+ z: [void 0, void 0, "should come last"],
+ },
+ });
+
+ // This test case is "underspecified" because the specifier array ["array"]
+ // does not name any nested fields to pull from each array element.
+ const underspecified = extractKeyPath(object, ["array"]);
+ expect(underspecified).toEqual(object.array);
+ const understringified = JSON.stringify(underspecified);
+ // Although the objects are structurally equal, they do not stringify the
+ // same, since the underspecified keys have been stably sorted.
+ expect(understringified).not.toEqual(JSON.stringify(object.array));
+
+ expect(understringified).toBe(JSON.stringify([
+ // Note that a: object.array[0].a has become the first key, because "a"
+ // precedes "value" alphabetically.
+ { a: object.array[0].a, value: 1 },
+ { value: 2, x: object.array[1].x },
+ { value: 3, z: object.array[2].z },
+ ]));
+
+ // This new ordering also happens to be the canonical/stable ordering,
+ // according to canonicalStringify.
+ expect(understringified).toBe(canonicalStringify(object.array));
+ });
+});
diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts
index 50781f77e17..d8b5d16d64c 100644
--- a/src/cache/inmemory/__tests__/policies.ts
+++ b/src/cache/inmemory/__tests__/policies.ts
@@ -224,6 +224,99 @@ describe("type policies", function () {
checkAuthorName(cache);
});
+ it("serializes nested keyFields objects in stable order", function () {
+ const cache = new InMemoryCache({
+ typePolicies: {
+ Book: {
+ // If you explicitly specify the order of author sub-fields, there
+ // will be no ambiguity about how the author object should be
+ // serialized. However, cache IDs should at least be stably
+ // stringified if the child property names are omitted, as below.
+ // keyFields: ["title", "author", ["firstName", "lastName"]],
+ keyFields: ["title", "author"],
+ },
+ },
+ });
+
+ const query = gql`
+ query {
+ book {
+ title
+ writer: author {
+ first: firstName
+ lastName
+ }
+ }
+ }
+ `;
+
+ cache.writeQuery({
+ query,
+ data: {
+ book: {
+ __typename: "Book",
+ writer: {
+ // The order of fields shouldn't matter here, since cache
+ // identification will stringify them in a stable order.
+ first: "Rebecca",
+ lastName: "Schwarzlose",
+ __typename: "Person",
+ },
+ title: "Brainscapes",
+ },
+ },
+ });
+
+ cache.writeQuery({
+ query,
+ data: {
+ book: {
+ __typename: "Book",
+ title: "The Science of Can and Can't",
+ writer: {
+ // The order of fields shouldn't matter here, since cache
+ // identification will stringify them in a stable order.
+ lastName: "Marletto",
+ __typename: "Person",
+ first: "Chiarra",
+ },
+ },
+ },
+ });
+
+ expect(cache.extract(true)).toEqual({
+ // The order of the author object's __typename, firstName, and lastName
+ // fields has been determined by our keyFields configuration and stable
+ // stringification.
+ 'Book:{"title":"Brainscapes","author":{"__typename":"Person","firstName":"Rebecca","lastName":"Schwarzlose"}}': {
+ __typename: "Book",
+ title: "Brainscapes",
+ author: {
+ __typename: "Person",
+ firstName: "Rebecca",
+ lastName: "Schwarzlose",
+ },
+ },
+ // Again, __typename, firstName, and then lastName, despite the different
+ // order of keys in the data we wrote.
+ 'Book:{"title":"The Science of Can and Can\'t","author":{"__typename":"Person","firstName":"Chiarra","lastName":"Marletto"}}': {
+ __typename: "Book",
+ title: "The Science of Can and Can't",
+ author: {
+ __typename: "Person",
+ firstName: "Chiarra",
+ lastName: "Marletto",
+ },
+ },
+ ROOT_QUERY: {
+ __typename: "Query",
+ book: {
+ __ref: 'Book:{"title":"The Science of Can and Can\'t","author":{"__typename":"Person","firstName":"Chiarra","lastName":"Marletto"}}',
+ },
+ },
+ });
+ });
+
it("accepts keyFields functions", function () {
const cache = new InMemoryCache({
typePolicies: {
diff --git a/src/cache/inmemory/key-extractor.ts b/src/cache/inmemory/key-extractor.ts
new file mode 100644
index 00000000000..fcf36d85230
--- /dev/null
+++ b/src/cache/inmemory/key-extractor.ts
@@ -0,0 +1,261 @@
+import { invariant } from "../../utilities/globals";
+
+import {
+ argumentsObjectFromField,
+ DeepMerger,
+ isNonEmptyArray,
+ isNonNullObject,
+} from "../../utilities";
+
+import { hasOwn } from "./helpers";
+import {
+ KeySpecifier,
+ KeyFieldsFunction,
+ KeyArgsFunction,
+} from "./policies";
+
+// Mapping from JSON-encoded KeySpecifier strings to associated information.
+const specifierInfoCache: Record<string, {
+ paths?: string[][];
+ keyFieldsFn?: KeyFieldsFunction;
+ keyArgsFn?: KeyArgsFunction;
+}> = Object.create(null);
+
+function lookupSpecifierInfo(spec: KeySpecifier) {
+ // It's safe to encode KeySpecifier arrays with JSON.stringify, since they're
+ // just arrays of strings or nested KeySpecifier arrays, and the order of the
+ // array elements is important (and suitably preserved by JSON.stringify).
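+ // For example, ["title", "author", ["name"]] and ["title", ["author"], "name"]
+ // serialize to different JSON strings, so they get distinct info entries.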
+ const cacheKey = JSON.stringify(spec);
+ return specifierInfoCache[cacheKey] ||
+ (specifierInfoCache[cacheKey] = Object.create(null));
+}
+
+export function keyFieldsFnFromSpecifier(
+ specifier: KeySpecifier,
+): KeyFieldsFunction {
+ const info = lookupSpecifierInfo(specifier);
+
+ return info.keyFieldsFn || (info.keyFieldsFn = (
+ object,
+ context,
+ ) => {
+ const extract: typeof extractKey =
+ (from, key) => context.readField(key, from);
+
+ const keyObject = context.keyObject = collectSpecifierPaths(
+ specifier,
+ schemaKeyPath => {
+ let extracted = extractKeyPath(
+ context.storeObject,
+ schemaKeyPath,
+ // Using context.readField to extract paths from context.storeObject
+ // allows the extraction to see through Reference objects and respect
+ // custom read functions.
+ extract,
+ );
+
+ if (
+ extracted === void 0 &&
+ object !== context.storeObject &&
+ hasOwn.call(object, schemaKeyPath[0])
+ ) {
+ // If context.storeObject fails to provide a value for the requested
+ // path, fall back to the raw result object, if it has a top-level key
+ // matching the first key in the path (schemaKeyPath[0]). This allows
+ // key fields included in the written data to be saved in the cache
+ // even if they are not selected explicitly in context.selectionSet.
+ // Not being mentioned by context.selectionSet is convenient here,
+ // since it means these extra fields cannot be affected by field
+ // aliasing, which is why we can use extractKey instead of
+ // context.readField for this extraction.
+ extracted = extractKeyPath(object, schemaKeyPath, extractKey);
+ }
+
+ invariant(
+ extracted !== void 0,
+ `Missing field '${schemaKeyPath.join('.')}' while extracting keyFields from ${
+ JSON.stringify(object)
+ }`,
+ );
+
+ return extracted;
+ },
+ );
+
+ return `${context.typename}:${JSON.stringify(keyObject)}`;
+ });
+}
+
+// The keyArgs extraction process is roughly analogous to keyFields extraction,
+// but there are no aliases involved, missing fields are tolerated (by merely
+// omitting them from the key), and drawing from field.directives or variables
+// is allowed (in addition to drawing from the field's arguments object).
+// Concretely, these differences mean passing a different key path extractor
+// function to collectSpecifierPaths, reusing the shared extractKeyPath helper
+// wherever possible.
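+// For example (with illustrative names), keyArgs: ["type", "$limit",
+// "@connection", ["key"]] for a field written as
+// feed(type: "NEWS", limit: $limit) @connection(key: "feedKey"), with
+// variables { limit: 10 }, would produce a storeFieldName like
+// 'feed:{"type":"NEWS","$limit":10,"@connection":{"key":"feedKey"}}'.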
+export function keyArgsFnFromSpecifier(specifier: KeySpecifier): KeyArgsFunction {
+ const info = lookupSpecifierInfo(specifier);
+
+ return info.keyArgsFn || (info.keyArgsFn = (args, {
+ field,
+ variables,
+ fieldName,
+ }) => {
+ const collected = collectSpecifierPaths(specifier, keyPath => {
+ const firstKey = keyPath[0];
+ const firstChar = firstKey.charAt(0);
+
+ if (firstChar === "@") {
+ if (field && isNonEmptyArray(field.directives)) {
+ const directiveName = firstKey.slice(1);
+ // If the directive appears multiple times, only the first
+ // occurrence's arguments will be used. TODO Allow repetition?
+ // TODO Cache this work somehow, a la aliasMap?
+ const d = field.directives.find(d => d.name.value === directiveName);
+ // Fortunately argumentsObjectFromField works for DirectiveNode!
+ const directiveArgs = d && argumentsObjectFromField(d, variables);
+ // For directives without arguments (d defined, but directiveArgs ===
+ // null), the presence or absence of the directive still counts as
+ // part of the field key, so we return null in those cases. If no
+ // directive with this name was found for this field (d undefined and
+ // thus directiveArgs undefined), we return undefined, which causes
+ // this value to be omitted from the key object returned by
+ // collectSpecifierPaths.
+ return directiveArgs && extractKeyPath(
+ directiveArgs,
+ // If keyPath.length === 1, this code calls extractKeyPath with an
+ // empty path, which works because it uses directiveArgs as the
+ // extracted value.
+ keyPath.slice(1),
+ );
+ }
+ // If the key started with @ but there was no corresponding directive,
+ // we want to omit this value from the key object, not fall through to
+ // treating @whatever as a normal argument name.
+ return;
+ }
+
+ if (firstChar === "$") {
+ const variableName = firstKey.slice(1);
+ if (variables && hasOwn.call(variables, variableName)) {
+ const varKeyPath = keyPath.slice(0);
+ varKeyPath[0] = variableName;
+ return extractKeyPath(variables, varKeyPath);
+ }
+ // If the key started with $ but there was no corresponding variable, we
+ // want to omit this value from the key object, not fall through to
+ // treating $whatever as a normal argument name.
+ return;
+ }
+
+ if (args) {
+ return extractKeyPath(args, keyPath);
+ }
+ });
+
+ const suffix = JSON.stringify(collected);
+
+ // If no arguments were passed to this field, and it didn't have any other
+ // field key contributions from directives or variables, hide the empty
+ // :{} suffix from the field key. However, a field passed no arguments can
+ // still end up with a non-empty :{...} suffix if its key configuration
+ // refers to directives or variables.
+ if (args || suffix !== "{}") {
+ fieldName += ":" + suffix;
+ }
+
+ return fieldName;
+ });
+}
+
+export function collectSpecifierPaths(
+ specifier: KeySpecifier,
+ extractor: (path: string[]) => any,
+): Record<string, any> {
+ // For each path specified by specifier, invoke the extractor, and repeatedly
+ // merge the results together, with appropriate ancestor context.
+ const merger = new DeepMerger;
+ return getSpecifierPaths(specifier).reduce((collected, path) => {
+ let toMerge = extractor(path);
+ if (toMerge !== void 0) {
+ // This path is not expected to contain array indexes, so the toMerge
+ // reconstruction will not contain arrays. TODO Fix this?
+ for (let i = path.length - 1; i >= 0; --i) {
+ toMerge = { [path[i]]: toMerge };
+ }
+ collected = merger.merge(collected, toMerge);
+ }
+ return collected;
+ }, Object.create(null));
+}
+
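+// Expands a KeySpecifier into a list of paths, where each nested specifier
+// array refines the string key immediately preceding it. For example,
+// ["a", ["b", "c"], "d"] expands to [["a", "b"], ["a", "c"], ["d"]].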
+export function getSpecifierPaths(spec: KeySpecifier): string[][] {
+ const info = lookupSpecifierInfo(spec);
+
+ if (!info.paths) {
+ const paths: string[][] = info.paths = [];
+ const currentPath: string[] = [];
+
+ spec.forEach((s, i) => {
+ if (Array.isArray(s)) {
+ getSpecifierPaths(s).forEach(p => paths.push(currentPath.concat(p)));
+ currentPath.length = 0;
+ } else {
+ currentPath.push(s);
+ if (!Array.isArray(spec[i + 1])) {
+ paths.push(currentPath.slice(0));
+ currentPath.length = 0;
+ }
+ }
+ });
+ }
+
+ return info.paths!;
+}
+
+function extractKey<
+ TObj extends Record<string, any>,
+ TKey extends string,
+>(object: TObj, key: TKey): TObj[TKey] | undefined {
+ return object[key];
+}
+
+export function extractKeyPath(
+ object: Record<string, any>,
+ path: string[],
+ extract?: typeof extractKey,
+): any {
+ // For each key in path, extract the corresponding child property from obj,
+ // flattening arrays if encountered (uncommon for keyFields and keyArgs, but
+ // possible). The final result of path.reduce is normalized so unexpected leaf
+ // objects have their keys safely sorted. That final result is difficult to
+ // type as anything other than any. You're welcome to try to improve the
+ // return type, but keep in mind extractKeyPath is not a public function
+ // (exported only for testing), so the effort may not be worthwhile unless the
+ // limited set of actual callers (see above) pass arguments that TypeScript
+ // can statically type. If we know only that path is some array of strings
+ // (and not, say, a specific tuple of statically known strings), any (or
+ // possibly unknown) is the honest answer.
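+ // For example, extractKeyPath({ a: { b: [{ c: 1 }, { c: 2 }] } }, ["a", "b", "c"])
+ // returns [1, 2], mapping over the array encountered along the path.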
+ extract = extract || extractKey;
+ return normalize(path.reduce(function reducer(obj, key): any {
+ return Array.isArray(obj)
+ ? obj.map(child => reducer(child, key))
+ : obj && extract!(obj, key);
+ }, object));
+}
+
+function normalize<T>(value: T): T {
+ // Usually the extracted value will be a scalar value, since most primary
+ // key fields are scalar, but just in case we get an object or an array, we
+ // need to do some normalization of the order of (nested) keys.
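+ // For example, normalize({ b: 2, a: { d: 4, c: 3 } }) returns
+ // { a: { c: 3, d: 4 }, b: 2 }, with keys sorted at every level.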
+ if (isNonNullObject(value)) {
+ if (Array.isArray(value)) {
+ return value.map(normalize) as any;
+ }
+ return collectSpecifierPaths(
+ Object.keys(value).sort(),
+ path => extractKeyPath(value, path),
+ ) as T;
+ }
+ return value;
+}
diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts
index cb1e0ade8ef..3d942404155 100644
--- a/src/cache/inmemory/policies.ts
+++ b/src/cache/inmemory/policies.ts
@@ -7,13 +7,8 @@ import {
FieldNode,
} from 'graphql';
-import { Trie } from '@wry/trie';
-
import {
FragmentMap,
- getFragmentFromSelection,
- isField,
- getTypenameFromResult,
storeKeyNameFromField,
StoreValue,
StoreObject,
@@ -21,10 +16,8 @@ import {
Reference,
isReference,
getStoreKeyName,
- canUseWeakMap,
isNonNullObject,
stringifyForDisplay,
- isNonEmptyArray,
} from '../../utilities';
import {
IdGetter,
@@ -56,6 +49,7 @@ import { WriteContext } from './writeToStore';
// used by getStoreKeyName. This function is used when computing storeFieldName
// strings (when no keyArgs has been configured for a field).
import { canonicalStringify } from './object-canon';
+import { keyArgsFnFromSpecifier, keyFieldsFnFromSpecifier } from './key-extractor';
getStoreKeyName.setStringify(canonicalStringify);
@@ -65,14 +59,40 @@ export type TypePolicies = {
-// TypeScript 3.7 will allow recursive type aliases, so this should work:
-// type KeySpecifier = (string | KeySpecifier)[]
-type KeySpecifier = (string | any[])[];
+export type KeySpecifier = (string | KeySpecifier)[];
export type KeyFieldsContext = {
- typename?: string;
+ // The __typename of the incoming object, even if the __typename field was
+ // aliased to another name in the raw result object. May be undefined when
+ // dataIdFromObject is called for objects without __typename fields.
+ typename: string | undefined;
+
+ // The object to be identified, after processing to remove aliases and
+ // normalize identifiable child objects with references.
+ storeObject: StoreObject;
+
+ // Handy tool for reading additional fields from context.storeObject, either
+ // readField("fieldName") to read storeObject[fieldName], or readField("name",
+ // objectOrReference) to read from another object or Reference. If you read a
+ // field with a read function, that function will be invoked.
+ readField: ReadFieldFunction;
+
+ // If you are writing a custom keyFields function, and you plan to use the raw
+ // result object passed as the first argument, you may also need access to the
+ // selection set and available fragments for this object, just in case any
+ // fields have aliases. Since this logic is tricky to get right, and these
+ // context properties are not even always provided (for example, they are
+ // omitted when calling cache.identify(object), where object is assumed to be
+ // a StoreObject), we recommend you use context.storeObject (which has already
+ // been de-aliased) and context.readField (which can read from references as
+ // well as objects) instead of the raw result object in your keyFields
+ // functions, or just rely on the internal implementation of keyFields:[...]
+ // syntax to get these details right for you.
selectionSet?: SelectionSetNode;
fragmentMap?: FragmentMap;
- // May be set by the KeyFieldsFunction to report fields that were involved
- // in computing the ID. Never passed in by the caller.
+
+ // Internal. May be set by the KeyFieldsFunction to report fields that were
+ // involved in computing the ID. Never passed in by the caller.
keyObject?: Record<string, any>;
};
@@ -309,28 +329,38 @@ export class Policies {
public identify(
object: StoreObject,
- selectionSet?: SelectionSetNode,
- fragmentMap?: FragmentMap,
+ partialContext?: Partial<KeyFieldsContext>,
): [string?, StoreObject?] {
- // TODO Use an AliasMap here?
- const typename = selectionSet && fragmentMap
- ? getTypenameFromResult(object, selectionSet, fragmentMap)
- : object.__typename;
-
- // It should be possible to write root Query fields with
- // writeFragment, using { __typename: "Query", ... } as the data, but
- // it does not make sense to allow the same identification behavior
- // for the Mutation and Subscription types, since application code
- // should never be writing directly to (or reading directly from)
- // those root objects.
+ const policies = this;
+
+ const typename = partialContext && (
+ partialContext.typename ||
+ partialContext.storeObject?.__typename
+ ) || object.__typename;
+
+ // It should be possible to write root Query fields with writeFragment,
+ // using { __typename: "Query", ... } as the data, but it does not make
+ // sense to allow the same identification behavior for the Mutation and
+ // Subscription types, since application code should never be writing
+ // directly to (or reading directly from) those root objects.
if (typename === this.rootTypenamesById.ROOT_QUERY) {
return ["ROOT_QUERY"];
}
+ // Default context.storeObject to object if not otherwise provided.
+ const storeObject = partialContext && partialContext.storeObject || object;
+
const context: KeyFieldsContext = {
+ ...partialContext,
typename,
- selectionSet,
- fragmentMap,
+ storeObject,
+ readField: partialContext && partialContext.readField || function () {
+ const options = normalizeReadFieldOptions(arguments, storeObject);
+ return policies.readField(options, {
+ store: policies.cache["data"],
+ variables: options.variables,
+ });
+ },
};
let id: KeyFieldsResult;
@@ -866,52 +896,57 @@ function makeFieldFunctionOptions(
storage,
cache: policies.cache,
canRead,
+ readField() {
+ return policies.readField(
+ normalizeReadFieldOptions(arguments, objectOrReference, context),
+ context,
+ );
+ },
+ mergeObjects: makeMergeObjectsFunction(context.store),
+ };
+}
- readField(
- fieldNameOrOptions: string | ReadFieldOptions,
- from?: StoreObject | Reference,
- ) {
- let options: ReadFieldOptions;
- if (typeof fieldNameOrOptions === "string") {
- options = {
- fieldName: fieldNameOrOptions,
- // Default to objectOrReference only when no second argument was
- // passed for the from parameter, not when undefined is explicitly
- // passed as the second argument.
- from: arguments.length > 1 ? from : objectOrReference,
- };
- } else if (isNonNullObject(fieldNameOrOptions)) {
- options = { ...fieldNameOrOptions };
- // Default to objectOrReference only when fieldNameOrOptions.from is
- // actually omitted, rather than just undefined.
- if (!hasOwn.call(fieldNameOrOptions, "from")) {
- options.from = objectOrReference;
- }
- } else {
- invariant.warn(`Unexpected readField arguments: ${
- stringifyForDisplay(Array.from(arguments))
- }`);
- // The readField helper function returns undefined for any missing
- // fields, so it should also return undefined if the arguments were not
- // of a type we expected.
- return;
- }
-
- if (__DEV__ && options.from === void 0) {
- invariant.warn(`Undefined 'from' passed to readField with arguments ${
- stringifyForDisplay(Array.from(arguments))
- }`);
- }
+export function normalizeReadFieldOptions(
+ readFieldArgs: IArguments,
+ objectOrReference: StoreObject | Reference | undefined,
+ variables?: ReadMergeModifyContext["variables"],
+): ReadFieldOptions {
+ const {
+ 0: fieldNameOrOptions,
+ 1: from,
+ length: argc,
+ } = readFieldArgs;
+
+ let options: ReadFieldOptions;
+
+ if (typeof fieldNameOrOptions === "string") {
+ options = {
+ fieldName: fieldNameOrOptions,
+ // Default to objectOrReference only when no second argument was
+ // passed for the from parameter, not when undefined is explicitly
+ // passed as the second argument.
+ from: argc > 1 ? from : objectOrReference,
+ };
+ } else {
+ options = { ...fieldNameOrOptions };
+ // Default to objectOrReference only when fieldNameOrOptions.from is
+ // actually omitted, rather than just undefined.
+ if (!hasOwn.call(options, "from")) {
+ options.from = objectOrReference;
+ }
+ }
- if (void 0 === options.variables) {
- options.variables = variables;
- }
+ if (__DEV__ && options.from === void 0) {
+ invariant.warn(`Undefined 'from' passed to readField with arguments ${
+ stringifyForDisplay(Array.from(readFieldArgs))
+ }`);
+ }
- return policies.readField(options, context);
- },
+ if (void 0 === options.variables) {
+ options.variables = variables;
+ }
- mergeObjects: makeMergeObjectsFunction(context.store),
- };
+ return options;
}
function makeMergeObjectsFunction(
@@ -964,201 +999,3 @@ function makeMergeObjectsFunction(
return incoming;
};
}
-
-function keyArgsFnFromSpecifier(
- specifier: KeySpecifier,
-): KeyArgsFunction {
- return (args, context) => {
- let key = context.fieldName;
-
- const suffix = JSON.stringify(
- computeKeyArgsObject(specifier, context.field, args, context.variables),
- );
-
- // If no arguments were passed to this field, and it didn't have any other
- // field key contributions from directives or variables, hide the empty :{}
- // suffix from the field key. However, a field passed no arguments can still
- // end up with a non-empty :{...} suffix if its key configuration refers to
- // directives or variables.
- if (args || suffix !== "{}") {
- key += ":" + suffix;
- }
-
- return key;
- };
-}
-
-function keyFieldsFnFromSpecifier(
- specifier: KeySpecifier,
-): KeyFieldsFunction {
- const trie = new Trie<{
- aliasMap?: AliasMap;
- }>(canUseWeakMap);
-
- return (object, context) => {
- let aliasMap: AliasMap | undefined;
- if (context.selectionSet && context.fragmentMap) {
- const info = trie.lookupArray([
- context.selectionSet,
- context.fragmentMap,
- ]);
- aliasMap = info.aliasMap || (
- info.aliasMap = makeAliasMap(context.selectionSet, context.fragmentMap)
- );
- }
-
- const keyObject = context.keyObject =
- computeKeyFieldsObject(specifier, object, aliasMap);
-
- return `${context.typename}:${JSON.stringify(keyObject)}`;
- };
-}
-
-type AliasMap = {
- // Map from store key to corresponding response key. Undefined when there are
- // no aliased fields in this selection set.
- aliases?: Record<string, string>;
- // Map from store key to AliasMap corresponding to a child selection set.
- // Undefined when there are no child selection sets.
- subsets?: Record<string, AliasMap>;
-};
-
-function makeAliasMap(
- selectionSet: SelectionSetNode,
- fragmentMap: FragmentMap,
-): AliasMap {
- let map: AliasMap = Object.create(null);
- // TODO Cache this work, perhaps by storing selectionSet._aliasMap?
- const workQueue = new Set([selectionSet]);
- workQueue.forEach(selectionSet => {
- selectionSet.selections.forEach(selection => {
- if (isField(selection)) {
- if (selection.alias) {
- const responseKey = selection.alias.value;
- const storeKey = selection.name.value;
- if (storeKey !== responseKey) {
- const aliases = map.aliases || (map.aliases = Object.create(null));
- aliases[storeKey] = responseKey;
- }
- }
- if (selection.selectionSet) {
- const subsets = map.subsets || (map.subsets = Object.create(null));
- subsets[selection.name.value] =
- makeAliasMap(selection.selectionSet, fragmentMap);
- }
- } else {
- const fragment = getFragmentFromSelection(selection, fragmentMap);
- if (fragment) {
- workQueue.add(fragment.selectionSet);
- }
- }
- });
- });
- return map;
-}
-
-function computeKeyFieldsObject(
- specifier: KeySpecifier,
- response: Record<string, any>,
- aliasMap: AliasMap | undefined,
-): Record<string, any> {
- // The order of adding properties to keyObj affects its JSON serialization,
- // so we are careful to build keyObj in the order of keys given in
- // specifier.
- const keyObj = Object.create(null);
-
- // The lastResponseKey variable tracks keys as seen in actual GraphQL response
- // objects, potentially affected by aliasing. The lastActualKey variable
- // tracks the corresponding key after removing aliases.
- let lastResponseKey: string | undefined;
- let lastActualKey: string | undefined;
-
- const aliases = aliasMap && aliasMap.aliases;
- const subsets = aliasMap && aliasMap.subsets;
-
- specifier.forEach(s => {
- if (Array.isArray(s)) {
- if (typeof lastActualKey === "string" &&
- typeof lastResponseKey === "string") {
- keyObj[lastActualKey] = computeKeyFieldsObject(
- s, // An array of fields names to extract from the previous response
- // object (response[lastResponseKey]).
- response[lastResponseKey],
- subsets && subsets[lastActualKey],
- );
- }
- } else {
- const responseKey = aliases && aliases[s] || s;
- invariant(
- hasOwn.call(response, responseKey),
- `Missing field '${responseKey}' while extracting keyFields from ${
- JSON.stringify(response)
- }`,
- );
- keyObj[lastActualKey = s] = response[lastResponseKey = responseKey];
- }
- });
-
- return keyObj;
-}
-
-function computeKeyArgsObject(
- specifier: KeySpecifier,
- field: FieldNode | null,
- source: Record<string, any> | null,
- variables: Record<string, any> | undefined,
-): Record<string, any> {
- // The order of adding properties to keyObj affects its JSON serialization,
- // so we are careful to build keyObj in the order of keys given in
- // specifier.
- const keyObj = Object.create(null);
-
- let last: undefined | {
- key: string;
- source: any;
- };
-
- specifier.forEach(key => {
- if (Array.isArray(key)) {
- if (last) {
- keyObj[last.key] =
- computeKeyArgsObject(key, field, last.source, variables);
- }
- } else {
- const firstChar = key.charAt(0);
-
- if (firstChar === "@") {
- if (field && isNonEmptyArray(field.directives)) {
- // TODO Cache this work somehow, a la aliasMap?
- const directiveName = key.slice(1);
- // If the directive appears multiple times, only the first
- // occurrence's arguments will be used. TODO Allow repetition?
- const d = field.directives.find(d => d.name.value === directiveName);
- if (d) {
- last = {
- key,
- // Fortunately argumentsObjectFromField works for DirectiveNode!
- source: keyObj[key] = argumentsObjectFromField(d, variables),
- };
- return;
- }
- }
-
- } else if (firstChar === "$") {
- const variableName = key.slice(1);
- if (variables && hasOwn.call(variables, variableName)) {
- last = { key, source: keyObj[key] = variables[variableName] };
- return;
- }
-
- } else if (source && hasOwn.call(source, key)) {
- last = { key, source: keyObj[key] = source[key] };
- return;
- }
-
- last = void 0;
- }
- });
-
- return keyObj;
-}
diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts
index a66182ace90..1476191259c 100644
--- a/src/cache/inmemory/writeToStore.ts
+++ b/src/cache/inmemory/writeToStore.ts
@@ -35,6 +35,8 @@ import { InMemoryCache } from './inMemoryCache';
import { EntityStore } from './entityStore';
import { Cache } from '../../core';
import { canonicalStringify } from './object-canon';
+import { normalizeReadFieldOptions } from './policies';
+import { ReadFieldFunction } from '../core/types/common';
export interface WriteContext extends ReadMergeModifyContext {
readonly written: {
@@ -224,51 +226,10 @@ export class StoreWriter {
}: ProcessSelectionSetOptions): StoreObject | Reference {
const { policies } = this.cache;
- // Identify the result object, even if dataId was already provided,
- // since we always need keyObject below.
- const [id, keyObject] = policies.identify(
- result, selectionSet, context.fragmentMap);
-
- // If dataId was not provided, fall back to the id just generated by
- // policies.identify.
- dataId = dataId || id;
-
- if ("string" === typeof dataId) {
- // Avoid processing the same entity object using the same selection
- // set more than once. We use an array instead of a Set since most
- // entity IDs will be written using only one selection set, so the
- // size of this array is likely to be very small, meaning indexOf is
- // likely to be faster than Set.prototype.has.
- const sets = context.written[dataId] || (context.written[dataId] = []);
- const ref = makeReference(dataId);
- if (sets.indexOf(selectionSet) >= 0) return ref;
- sets.push(selectionSet);
-
- // If we're about to write a result object into the store, but we
- // happen to know that the exact same (===) result object would be
- // returned if we were to reread the result with the same inputs,
- // then we can skip the rest of the processSelectionSet work for
- // this object, and immediately return a Reference to it.
- if (this.reader && this.reader.isFresh(
- result,
- ref,
- selectionSet,
- context,
- )) {
- return ref;
- }
- }
-
// This variable will be repeatedly updated using context.merge to
// accumulate all fields that need to be written into the store.
let incoming: StoreObject = Object.create(null);
- // Write any key fields that were used during identification, even if
- // they were not mentioned in the original query.
- if (keyObject) {
- incoming = context.merge(incoming, keyObject);
- }
-
// If typename was not passed in, infer it. Note that typename is
// always passed in for tricky-to-infer cases such as "Query" for
// ROOT_QUERY.
@@ -281,6 +242,38 @@ export class StoreWriter {
incoming.__typename = typename;
}
+ // This readField function will be passed as context.readField in the
+ // KeyFieldsContext object created within policies.identify (called below).
+ // In addition to reading from the existing context.store (thanks to the
+ // policies.readField(options, context) line at the very bottom), this
+ // version of readField can read from Reference objects that are currently
+ // pending in context.incomingById, which is important whenever keyFields
+ // need to be extracted from a child object that processSelectionSet has
+ // turned into a Reference.
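+ // For example (hypothetical ID), if a child entity was normalized into a
+ // Reference like { __ref: 'Book:{"isbn":"..."}' } earlier in this write,
+ // reading fields from that Reference still works, because its pending
+ // StoreObject can be found in context.incomingById.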
+ const readField: ReadFieldFunction = function (this: void) {
+ const options = normalizeReadFieldOptions(
+ arguments,
+ incoming,
+ context.variables,
+ );
+
+ if (isReference(options.from)) {
+ const info = context.incomingById.get(options.from.__ref);
+ if (info) {
+ const result = policies.readField({
+ ...options,
+ from: info.storeObject
+ }, context);
+
+ if (result !== void 0) {
+ return result;
+ }
+ }
+ }
+
+ return policies.readField(options, context);
+ };
+
const fieldNodeSet = new Set<FieldNode>();
this.flattenFields(
@@ -325,33 +318,10 @@ export class StoreWriter {
// The field's value can be an object that has a __typename only if the
// field has a selection set. Otherwise incomingValue is scalar.
- if (field.selectionSet) {
- // We attempt to find the child __typename first in context.store, but
- // the child object may not exist in the store yet, likely because
- // it's being written for the first time, during this very call to
- // writeToStore. Note: if incomingValue is a non-normalized
- // StoreObject (not a Reference), getFieldValue will read from that
- // object's properties to find its __typename.
- childTypename = context.store.getFieldValue(
- incomingValue as StoreObject | Reference,
- "__typename",
- );
-
- // If the child object is being written for the first time, but
- // incomingValue is a Reference, then the entity that Reference
- // identifies should have an entry in context.incomingById, which
- // likely contains a __typename field we can use. After all, how could
- // we know the object's ID if it had no __typename? If we wrote data
- // into context.store as each processSelectionSet call finished
- // processing an entity object, the child object would already be in
- // context.store, so we wouldn't need this extra check, but holding
- // all context.store.merge calls until after we've finished all
- // processSelectionSet work is cleaner and solves other problems, such
- // as issue #8370.
- if (!childTypename && isReference(incomingValue)) {
- const info = context.incomingById.get(incomingValue.__ref);
- childTypename = info && info.storeObject.__typename;
- }
+ if (field.selectionSet &&
+ (isReference(incomingValue) ||
+ storeValueIsStoreObject(incomingValue))) {
+ childTypename = readField<string>("__typename", incomingValue);
}
const merge = policies.getMergeFunction(
@@ -393,7 +363,58 @@ export class StoreWriter {
}
});
+ // Identify the result object, even if dataId was already provided,
+ // since we always need keyObject below.
+ try {
+ const [id, keyObject] = policies.identify(result, {
+ typename,
+ selectionSet,
+ fragmentMap: context.fragmentMap,
+ storeObject: incoming,
+ readField,
+ });
+
+ // If dataId was not provided, fall back to the id just generated by
+ // policies.identify.
+ dataId = dataId || id;
+
+ // Write any key fields that were used during identification, even if
+ // they were not mentioned in the original query.
+ if (keyObject) {
+ // TODO Reverse the order of the arguments?
+ incoming = context.merge(incoming, keyObject);
+ }
+ } catch (e) {
+ // If dataId was provided, tolerate failure of policies.identify.
+ if (!dataId) throw e;
+ }
+
if ("string" === typeof dataId) {
+ const dataRef = makeReference(dataId);
+
+ // Avoid processing the same entity object using the same selection
+ // set more than once. We use an array instead of a Set since most
+ // entity IDs will be written using only one selection set, so the
+ // size of this array is likely to be very small, meaning indexOf is
+ // likely to be faster than Set.prototype.has.
+ const sets = context.written[dataId] || (context.written[dataId] = []);
+ if (sets.indexOf(selectionSet) >= 0) return dataRef;
+ sets.push(selectionSet);
+
+ // If we're about to write a result object into the store, but we
+ // happen to know that the exact same (===) result object would be
+ // returned if we were to reread the result with the same inputs,
+ // then we can skip the rest of the processSelectionSet work for
+ // this object, and immediately return a Reference to it.
+ if (this.reader && this.reader.isFresh(
+ result,
+ dataRef,
+ selectionSet,
+ context,
+ )) {
+ return dataRef;
+ }
+
const previous = context.incomingById.get(dataId);
if (previous) {
previous.storeObject = context.merge(previous.storeObject, incoming);
@@ -409,7 +430,8 @@ export class StoreWriter {
fieldNodeSet,
});
}
- return makeReference(dataId);
+
+ return dataRef;
}
return incoming;