Move warnAboutDataLoss into policies.ts.
It's much easier for the Policies code to check whether there's a merge
function defined for a given parent __typename and field name.
benjamn committed Jan 24, 2020
1 parent 11f101c commit bc2712d
Showing 2 changed files with 97 additions and 88 deletions.
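
To make the commit message concrete, here is a minimal TypeScript sketch of the kind of lookup it describes: checking whether a merge function is defined for a given parent __typename and field name. Only the hasMergeFunction name comes from the diff below; the internal typePolicies shape and other names in this sketch are illustrative assumptions, not Apollo Client's actual implementation.

type FieldMergeFunction = (existing: unknown, incoming: unknown) => unknown;

interface FieldPolicySketch {
  merge?: FieldMergeFunction;
}

class PoliciesSketch {
  // Hypothetical internal shape: field policies keyed first by parent
  // __typename, then by field name.
  private typePolicies: Record<string, Record<string, FieldPolicySketch>> = {};

  // The check the commit enables: is a merge function defined for this
  // parent __typename and field name?
  public hasMergeFunction(typename: string | undefined, fieldName: string): boolean {
    const fields = typename ? this.typePolicies[typename] : undefined;
    const policy = fields && fields[fieldName];
    return Boolean(policy && policy.merge);
  }
}

Because the warning logic now lives next to this policy data, it can skip warning whenever such a merge function exists for the field being replaced.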
78 changes: 0 additions & 78 deletions src/cache/inmemory/entityStore.ts
@@ -1,5 +1,4 @@
import { dep, OptimisticDependencyFunction, KeyTrie } from 'optimism';
import { invariant } from 'ts-invariant';
import { equal } from '@wry/equality';

import { isReference, StoreValue } from '../../utilities/graphql/storeUtils';
@@ -460,10 +459,6 @@ const storeObjectReconciler: ReconcilerFunction<[EntityStore]> = function (
if (equal(existing, incoming)) {
return existing;
}

if (process.env.NODE_ENV !== 'production') {
warnAboutDataLoss(existingObject, incomingObject, property, store);
}
}

// In all other cases, incoming replaces existing without any effort to
@@ -472,79 +467,6 @@ const storeObjectReconciler: ReconcilerFunction<[EntityStore]> = function (
return incoming;
}

const warnings = new Set<string>();

function warnAboutDataLoss(
existingObject: Record<string | number, any>,
incomingObject: Record<string | number, any>,
property: string | number,
store: EntityStore,
) {
const existing = existingObject[property];
const incoming = incomingObject[property];

// It's always safe to replace a reference, since it refers to data
// safely stored elsewhere.
if (isReference(existing)) return;

// If we're replacing every key of the existing object, then the
// existing data would be overwritten even if the objects were
// normalized, so warning would not be helpful here.
if (Object.keys(existing).every(
isReference(incoming)
? key => store.has(incoming.__ref, key)
: key => hasOwn.call(incoming, key))) {
return;
}

const parentType =
getTypenameFromStoreObject(store, existingObject) ||
getTypenameFromStoreObject(store, incomingObject);

const fieldName = fieldNameFromStoreName(String(property));
const typeDotName = `${parentType}.${fieldName}`;

if (warnings.has(typeDotName)) return;
warnings.add(typeDotName);

const childTypenames: string[] = [];
// Arrays do not have __typename fields, and always need a custom merge
// function, even if their elements are normalized entities.
if (!Array.isArray(existing) &&
!Array.isArray(incoming)) {
[existing, incoming].forEach(child => {
const typename = getTypenameFromStoreObject(store, child);
if (typeof typename === "string" &&
!childTypenames.includes(typename)) {
childTypenames.push(typename);
}
});
}

invariant.warn(
`Cache data may be lost when replacing the ${fieldName} field of a ${parentType} object.
To address this problem (which is not a bug in Apollo Client), ${
childTypenames.length
? "either ensure that objects of type " +
childTypenames.join(" and ") + " have IDs, or "
: ""
}define a custom merge function for the ${
typeDotName
} field, so the InMemoryCache can safely merge these objects:
existing: ${JSON.stringify(existing).slice(0, 1000)}
incoming: ${JSON.stringify(incoming).slice(0, 1000)}
For more information about these options, please refer to the documentation:
* Ensuring entity objects have IDs: https://deploy-preview-5677--apollo-client-docs.netlify.com/docs/react/v3.0-beta/caching/cache-configuration/#generating-unique-identifiers
* Defining custom merge functions: https://deploy-preview-5677--apollo-client-docs.netlify.com/docs/react/v3.0-beta/caching/cache-field-behavior/#merging-non-normalized-objects
`
);
}

export function supportsResultCaching(store: any): store is EntityStore {
// When result caching is disabled, store.depend will be null.
return !!(store instanceof EntityStore && store.group.caching);
107 changes: 97 additions & 10 deletions src/cache/inmemory/policies.ts
@@ -594,14 +594,6 @@ export class Policies {

const merge = policy && policy.merge;
if (merge) {
if (process.env.NODE_ENV !== "production") {
// It may be tempting to modify existing data directly, for example
// by pushing more elements onto an existing array, but merge
// functions are expected to be pure, so it's important that we
// enforce immutability in development.
maybeDeepFreeze(existing);
}

// If storage ends up null, that just means no options.storage object
// has ever been created for a read function for this field before, so
// there's nothing this merge function could do with options.storage
@@ -659,6 +651,10 @@
const e = existing as StoreObject | Reference;
const i = incoming as object as StoreObject;

const typename =
getFieldValue<string>(e, "__typename") ||
getFieldValue<string>(i, "__typename");

// If the existing object is a { __ref } object, e.__ref provides a
// stable key for looking up the storage object associated with
// e.__ref and storeFieldName. Otherwise, storage is enabled only if
@@ -671,8 +667,9 @@
: typeof e === "object" && e;

Object.keys(i).forEach(storeFieldName => {
i[storeFieldName] = policies.applyMerges(
getFieldValue(e, storeFieldName),
const existingValue = getFieldValue(e, storeFieldName);
const mergedValue = i[storeFieldName] = policies.applyMerges(
existingValue,
i[storeFieldName],
getFieldValue,
variables,
Expand All @@ -681,13 +678,103 @@ export class Policies {
// read function for this field.
firstStorageKey && [firstStorageKey, storeFieldName],
);

if (
process.env.NODE_ENV !== 'production' &&
mergedValue !== existingValue &&
!policies.hasMergeFunction(
typename, fieldNameFromStoreName(storeFieldName))
) {
warnAboutDataLoss(e, i, storeFieldName, getFieldValue);
}
});
}

return incoming;
}
}

const warnings = new Set<string>();

// Note that this function is unused in production, and thus should be pruned
// by any well-configured minifier.
function warnAboutDataLoss(
existingObject: StoreObject | Reference,
incomingObject: StoreObject | Reference,
storeFieldName: string,
getFieldValue: FieldValueGetter,
) {
const getChild = (objOrRef: StoreObject | Reference): StoreObject => {
const child = getFieldValue<StoreObject>(objOrRef, storeFieldName);
return typeof child === "object" && child;
};

const existing = getChild(existingObject);
if (!existing) return;

const incoming = getChild(incomingObject);
if (!incoming) return;

// It's always safe to replace a reference, since it refers to data
// safely stored elsewhere.
if (isReference(existing)) return;

// If we're replacing every key of the existing object, then the
// existing data would be overwritten even if the objects were
// normalized, so warning would not be helpful here.
if (Object.keys(existing).every(
key => getFieldValue(incoming, key) !== void 0)) {
return;
}

const parentType =
getFieldValue(existingObject, "__typename") ||
getFieldValue(incomingObject, "__typename");

const fieldName = fieldNameFromStoreName(storeFieldName);
const typeDotName = `${parentType}.${fieldName}`;

if (warnings.has(typeDotName)) return;
warnings.add(typeDotName);

const childTypenames: string[] = [];
// Arrays do not have __typename fields, and always need a custom merge
// function, even if their elements are normalized entities.
if (!Array.isArray(existing) &&
!Array.isArray(incoming)) {
[existing, incoming].forEach(child => {
const typename = getFieldValue(child, "__typename");
if (typeof typename === "string" &&
!childTypenames.includes(typename)) {
childTypenames.push(typename);
}
});
}

invariant.warn(
`Cache data may be lost when replacing the ${fieldName} field of a ${parentType} object.
To address this problem (which is not a bug in Apollo Client), ${
childTypenames.length
? "either ensure that objects of type " +
childTypenames.join(" and ") + " have IDs, or "
: ""
}define a custom merge function for the ${
typeDotName
} field, so the InMemoryCache can safely merge these objects:
existing: ${JSON.stringify(existing).slice(0, 1000)}
incoming: ${JSON.stringify(incoming).slice(0, 1000)}
For more information about these options, please refer to the documentation:
* Ensuring entity objects have IDs: https://deploy-preview-5677--apollo-client-docs.netlify.com/docs/react/v3.0-beta/caching/cache-configuration/#generating-unique-identifiers
* Defining custom merge functions: https://deploy-preview-5677--apollo-client-docs.netlify.com/docs/react/v3.0-beta/caching/cache-field-behavior/#merging-non-normalized-objects
`
);
}

function keyArgsFnFromSpecifier(
specifier: KeySpecifier,
): KeyArgsFunction {

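The warning text moved in this commit points users at two remedies: ensure that child objects have IDs, or define a custom merge function for the offending field. As a usage note, here is a hedged sketch of the second remedy using the public InMemoryCache typePolicies API; the Book type and author field are hypothetical examples, not taken from this commit.

import { InMemoryCache } from "@apollo/client";

const cache = new InMemoryCache({
  typePolicies: {
    // "Book" and "author" are hypothetical names used only for illustration.
    Book: {
      fields: {
        author: {
          // Shallow-merge incoming data into the existing data instead of
          // replacing it outright, so previously cached fields are not lost.
          merge(existing, incoming) {
            return { ...existing, ...incoming };
          },
        },
      },
    },
  },
});

With a merge function registered this way, policies.hasMergeFunction returns true for Book.author and the data-loss warning is not emitted for that field.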