
Restrict field name over-invalidation to fields without keyArgs. (#7351)
Implementing the idea I described in this comment: #7324 (comment)
benjamn authored Nov 21, 2020
1 parent 3e58873 commit dcbd814
Showing 4 changed files with 226 additions and 3 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -73,6 +73,9 @@
- `ApolloCache` objects (including `InMemoryCache`) may now be associated with or disassociated from individual reactive variables by calling `reactiveVar.attachCache(cache)` and/or `reactiveVar.forgetCache(cache)`. <br/>
[@benjamn](https://github.com/benjamn) in [#7350](https://github.com/apollographql/apollo-client/pull/7350)

- Modifying `InMemoryCache` fields that have `keyArgs` configured will now invalidate only the field value with matching key arguments, rather than invalidating all field values that share the same field name. If `keyArgs` has not been configured, the cache must err on the side of invalidating by field name, as before. <br/>
[@benjamn](https://github.com/benjamn) in [#7351](https://github.com/apollographql/apollo-client/pull/7351)
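
  As a rough illustration of the new granularity, here is a minimal sketch (not part of the changelog or the commit itself); the `Query.book` field, its `keyArgs`, and the ISBN values mirror the test added below but are otherwise hypothetical:

    import { InMemoryCache, gql } from "@apollo/client";

    const cache = new InMemoryCache({
      typePolicies: {
        Query: {
          fields: {
            // keyArgs tells the cache which arguments identify a field value,
            // so book({"isbn":"A"}) and book({"isbn":"B"}) are stored and
            // invalidated independently of each other.
            book: {
              keyArgs: ["isbn"],
            },
          },
        },
        Book: {
          keyFields: ["isbn"],
        },
      },
    });

    // Writing data for one ISBN now dirties only the book({"isbn":...}) field
    // value with matching key arguments, so a cache.watch (or useQuery) that
    // read a different ISBN is not re-run. Without keyArgs, writing any Book
    // would have invalidated every cached Query.book value.
    cache.writeQuery({
      query: gql`
        query Book($isbn: String) {
          book(isbn: $isbn) { isbn title }
        }
      `,
      variables: { isbn: "1982103558" },
      data: {
        book: {
          __typename: "Book",
          isbn: "1982103558",
          title: "The End of Everything",
        },
      },
    });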

## Apollo Client 3.2.7

## Bug Fixes
186 changes: 186 additions & 0 deletions src/cache/inmemory/__tests__/entityStore.ts
@@ -4,6 +4,7 @@ import { InMemoryCache } from '../inMemoryCache';
import { DocumentNode } from 'graphql';
import { StoreObject } from '../types';
import { ApolloCache } from '../../core/cache';
import { Cache } from '../../core/types/Cache';
import { Reference, makeReference, isReference } from '../../../utilities/graphql/storeUtils';
import { MissingFieldError } from '../..';

@@ -2277,4 +2278,189 @@ describe('EntityStore', () => {
favorited: false,
}});
});

it("should not over-invalidate fields with keyArgs", () => {
const isbnsWeHaveRead: string[] = [];

const cache = new InMemoryCache({
typePolicies: {
Query: {
fields: {
book: {
// The presence of this keyArgs configuration permits the
// cache to track result caching dependencies at the level
// of individual Books, so writing one Book does not
// invalidate other Books with different ISBNs. If the cache
// doesn't know which arguments are "important," it can't
// make any assumptions about the relationships between
// field values with the same field name but different
// arguments, so it has to err on the side of invalidating
// all Query.book data whenever any Book is written.
keyArgs: ["isbn"],

read(book, { args, toReference }) {
isbnsWeHaveRead.push(args!.isbn);
return book || toReference({
__typename: "Book",
isbn: args!.isbn,
});
},
},
},
},

Book: {
keyFields: ["isbn"],
},
},
});

const query = gql`
      query Book($isbn: String) {
book(isbn: $isbn) {
title
isbn
author {
name
}
}
}
`;

const diffs: Cache.DiffResult<any>[] = [];
cache.watch({
query,
optimistic: true,
variables: {
isbn: "1449373321",
},
callback(diff) {
diffs.push(diff);
},
});

const ddiaData = {
book: {
__typename: "Book",
isbn: "1449373321",
title: "Designing Data-Intensive Applications",
author: {
__typename: "Author",
name: "Martin Kleppmann",
},
},
};

expect(isbnsWeHaveRead).toEqual([]);

cache.writeQuery({
query,
variables: {
isbn: "1449373321",
},
data: ddiaData,
});

expect(isbnsWeHaveRead).toEqual([
"1449373321",
]);

expect(diffs).toEqual([
{
complete: true,
result: ddiaData,
},
]);

const theEndData = {
book: {
__typename: "Book",
isbn: "1982103558",
title: "The End of Everything",
author: {
__typename: "Author",
name: "Katie Mack",
},
},
};

cache.writeQuery({
query,
variables: {
isbn: "1982103558",
},
data: theEndData,
});

// This list does not include the book we just wrote, because the
// cache.watch we started above only depends on the Query.book field
// value corresponding to the 1449373321 ISBN.
expect(diffs).toEqual([
{
complete: true,
result: ddiaData,
},
]);

// Likewise, this list is unchanged, because we did not need to read
// the 1449373321 Book again after writing the 1982103558 data.
expect(isbnsWeHaveRead).toEqual([
"1449373321",
]);

const theEndResult = cache.readQuery({
query,
variables: {
isbn: "1982103558",
},
});

expect(theEndResult).toEqual(theEndData);

expect(isbnsWeHaveRead).toEqual([
"1449373321",
"1982103558",
]);

expect(cache.readQuery({
query,
variables: {
isbn: "1449373321",
},
})).toBe(diffs[0].result);

expect(cache.readQuery({
query,
variables: {
isbn: "1982103558",
},
})).toBe(theEndResult);

// Still no additional reads, because both books are cached.
expect(isbnsWeHaveRead).toEqual([
"1449373321",
"1982103558",
]);

// Evicting the 1982103558 Book should not invalidate the 1449373321
// Book, so diffs and isbnsWeHaveRead should remain unchanged.
expect(cache.evict({
id: cache.identify({
__typename: "Book",
isbn: "1982103558",
}),
})).toBe(true);

expect(diffs).toEqual([
{
complete: true,
result: ddiaData,
},
]);

expect(isbnsWeHaveRead).toEqual([
"1449373321",
"1982103558",
]);
});
});
35 changes: 32 additions & 3 deletions src/cache/inmemory/entityStore.ts
@@ -95,23 +95,42 @@ export abstract class EntityStore implements NormalizedCache {

public merge(dataId: string, incoming: StoreObject): void {
const existing = this.lookup(dataId);
const merged = new DeepMerger(storeObjectReconciler).merge(existing, incoming);
const merged: StoreObject =
new DeepMerger(storeObjectReconciler).merge(existing, incoming);
// Even if merged === existing, existing may have come from a lower
// layer, so we always need to set this.data[dataId] on this level.
this.data[dataId] = merged;
if (merged !== existing) {
delete this.refs[dataId];
if (this.group.caching) {
const fieldsToDirty: Record<string, 1> = Object.create(null);

// If we added a new StoreObject where there was previously none, dirty
// anything that depended on the existence of this dataId, such as the
// EntityStore#has method.
if (!existing) fieldsToDirty.__exists = 1;

// Now invalidate dependents who called getFieldValue for any fields
// that are changing as a result of this merge.
Object.keys(incoming).forEach(storeFieldName => {
if (!existing || existing[storeFieldName] !== merged[storeFieldName]) {
fieldsToDirty[fieldNameFromStoreName(storeFieldName)] = 1;
// Always dirty the full storeFieldName, which may include
// serialized arguments following the fieldName prefix.
fieldsToDirty[storeFieldName] = 1;

// Also dirty fieldNameFromStoreName(storeFieldName) if it's
// different from storeFieldName and this field does not have
// keyArgs configured, because that means the cache can't make
// any assumptions about how field values with the same field
// name but different arguments might be interrelated, so it
// must err on the side of invalidating all field values that
// share the same short fieldName, regardless of arguments.
const fieldName = fieldNameFromStoreName(storeFieldName);
if (fieldName !== storeFieldName &&
!this.policies.hasKeyArgs(merged.__typename, fieldName)) {
fieldsToDirty[fieldName] = 1;
}

// If merged[storeFieldName] has become undefined, and this is the
// Root layer, actually delete the property from the merged object,
// which is guaranteed to have been created fresh in this method.
@@ -120,6 +139,7 @@ export abstract class EntityStore implements NormalizedCache {
}
}
});

Object.keys(fieldsToDirty).forEach(
fieldName => this.group.dirty(dataId, fieldName));
}
@@ -456,6 +476,15 @@ class CacheGroup {
public depend(dataId: string, storeFieldName: string) {
if (this.d) {
this.d(makeDepKey(dataId, storeFieldName));
const fieldName = fieldNameFromStoreName(storeFieldName);
if (fieldName !== storeFieldName) {
// Fields with arguments that contribute extra identifying
// information to the fieldName (thus forming the storeFieldName)
// depend not only on the full storeFieldName but also on the
// short fieldName, so the field can be invalidated using either
// level of specificity.
this.d(makeDepKey(dataId, fieldName));
}
}
}

@@ -474,7 +503,7 @@ function makeDepKey(dataId: string, storeFieldName: string) {
// Since field names cannot have '#' characters in them, this method
// of joining the field name and the ID should be unambiguous, and much
// cheaper than JSON.stringify([dataId, fieldName]).
return fieldNameFromStoreName(storeFieldName) + '#' + dataId;
return storeFieldName + '#' + dataId;
}

export namespace EntityStore {
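
To make the dependency-key change above concrete, here is a hedged sketch of the strings involved; the helper below only approximates Apollo's internal `fieldNameFromStoreName`, and the field and ID values are hypothetical:

  // Simplified stand-in for Apollo's internal helper: strip the serialized
  // argument suffix, keeping only the bare field name.
  function fieldNameFromStoreName(storeFieldName: string): string {
    const match = storeFieldName.match(/^[_A-Za-z][_0-9A-Za-z]*/);
    return match ? match[0] : storeFieldName;
  }

  // After this commit, makeDepKey keeps the full storeFieldName (arguments
  // included) instead of collapsing it to the bare field name.
  function makeDepKey(dataId: string, storeFieldName: string): string {
    return storeFieldName + "#" + dataId;
  }

  const storeFieldName = 'book:{"isbn":"1449373321"}';

  // A reader of Query.book(isbn: "1449373321") registers both keys, so the
  // field can be invalidated at either level of specificity:
  makeDepKey("ROOT_QUERY", storeFieldName);
  // => 'book:{"isbn":"1449373321"}#ROOT_QUERY'
  makeDepKey("ROOT_QUERY", fieldNameFromStoreName(storeFieldName));
  // => 'book#ROOT_QUERY'

  // Writing book({"isbn":"1982103558"}) always dirties its own
  // storeFieldName key. It additionally dirties the shared 'book#ROOT_QUERY'
  // key only when Query.book has no keyArgs configured, which is what
  // previously caused every Query.book reader to be invalidated.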
5 changes: 5 additions & 0 deletions src/cache/inmemory/policies.ts
@@ -646,6 +646,11 @@
return false;
}

public hasKeyArgs(typename: string | undefined, fieldName: string) {
const policy = this.getFieldPolicy(typename, fieldName, false);
return !!(policy && policy.keyFn);
}

public getStoreFieldName(fieldSpec: FieldSpecifier): string {
const { typename, fieldName } = fieldSpec;
const policy = this.getFieldPolicy(typename, fieldName, false);
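
The new `Policies#hasKeyArgs` helper is internal to the cache, but its effect can be summarized with a hypothetical type policy (a sketch, not Apollo source):

  import { InMemoryCache } from "@apollo/client";

  const cache = new InMemoryCache({
    typePolicies: {
      Query: {
        fields: {
          // keyArgs produces a key function for this field, so the internal
          // hasKeyArgs("Query", "book") check is true and merge() skips the
          // broad by-field-name invalidation for book.
          book: { keyArgs: ["isbn"] },
        },
      },
    },
  });

  // For any field without a keyArgs configuration, hasKeyArgs returns false
  // and writes fall back to dirtying the bare field name, exactly as they
  // did before this commit.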
