import { __assign, __extends, __rest } from "tslib";
import { invariant } from "../../utilities/globals/index.js";
import { dep } from "optimism";
import { equal } from "@wry/equality";
import { Trie } from "@wry/trie";
import { isReference, makeReference, DeepMerger, maybeDeepFreeze, canUseWeakMap, isNonNullObject, } from "../../utilities/index.js";
import { hasOwn, fieldNameFromStoreName } from "./helpers.js";
var DELETE = Object.create(null);
var delModifier = function () { return DELETE; };
var INVALIDATE = Object.create(null);
var EntityStore = /** @class */ (function () {
    function EntityStore(policies, group) {
        var _this = this;
        this.policies = policies;
        this.group = group;
        this.data = Object.create(null);
        // Maps root entity IDs to the number of times they have been retained, minus
        // the number of times they have been released. Retained entities keep other
        // entities they reference (even indirectly) from being garbage collected.
        this.rootIds = Object.create(null);
        // Lazily tracks { __ref: <dataId> } strings contained by this.data[dataId].
        this.refs = Object.create(null);
        // Bound function that can be passed around to provide easy access to fields
        // of Reference objects as well as ordinary objects.
        this.getFieldValue = function (objectOrReference, storeFieldName) {
            return maybeDeepFreeze(isReference(objectOrReference) ?
                _this.get(objectOrReference.__ref, storeFieldName)
                : objectOrReference && objectOrReference[storeFieldName]);
        };
        // Returns true for non-normalized StoreObjects and non-dangling
        // References, indicating that readField(name, objOrRef) has a chance of
        // working. Useful for filtering out dangling references from lists.
        this.canRead = function (objOrRef) {
            return isReference(objOrRef) ?
                _this.has(objOrRef.__ref)
                : typeof objOrRef === "object";
        };
        // Bound function that converts an id or an object with a __typename and
        // primary key fields to a Reference object. If called with a Reference object,
        // that same Reference object is returned. Pass true for mergeIntoStore to persist
        // an object into the store.
        this.toReference = function (objOrIdOrRef, mergeIntoStore) {
            if (typeof objOrIdOrRef === "string") {
                return makeReference(objOrIdOrRef);
            }
            if (isReference(objOrIdOrRef)) {
                return objOrIdOrRef;
            }
            var id = _this.policies.identify(objOrIdOrRef)[0];
            if (id) {
                var ref = makeReference(id);
                if (mergeIntoStore) {
                    _this.merge(id, objOrIdOrRef);
                }
                return ref;
            }
        };
    }
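    // Usage sketch (illustrative only): toReference accepts a dataId string, an
    // existing Reference, or an identifiable object. Assuming a hypothetical
    // "Author" type identified by its id field:
    //
    //   store.toReference("Author:1");                            // => { __ref: "Author:1" }
    //   store.toReference({ __typename: "Author", id: 1 });       // => { __ref: "Author:1" } (no write)
    //   store.toReference({ __typename: "Author", id: 1 }, true); // same, but also merges the object into the store
    //   store.toReference({ unidentifiable: true });              // => undefined (no key fields)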
    // Although the EntityStore class is abstract, it contains concrete
    // implementations of the various NormalizedCache interface methods that
    // are inherited by the Root and Layer subclasses.
    EntityStore.prototype.toObject = function () {
        return __assign({}, this.data);
    };
    EntityStore.prototype.has = function (dataId) {
        return this.lookup(dataId, true) !== void 0;
    };
    EntityStore.prototype.get = function (dataId, fieldName) {
        this.group.depend(dataId, fieldName);
        if (hasOwn.call(this.data, dataId)) {
            var storeObject = this.data[dataId];
            if (storeObject && hasOwn.call(storeObject, fieldName)) {
                return storeObject[fieldName];
            }
        }
        if (fieldName === "__typename" &&
            hasOwn.call(this.policies.rootTypenamesById, dataId)) {
            return this.policies.rootTypenamesById[dataId];
        }
        if (this instanceof Layer) {
            return this.parent.get(dataId, fieldName);
        }
    };
    EntityStore.prototype.lookup = function (dataId, dependOnExistence) {
        // The has method (above) calls lookup with dependOnExistence = true, so
        // that it can later be invalidated when we add or remove a StoreObject for
        // this dataId. Any consumer who cares about the contents of the StoreObject
        // should not rely on this dependency, since the contents could change
        // without the object being added or removed.
        if (dependOnExistence)
            this.group.depend(dataId, "__exists");
        if (hasOwn.call(this.data, dataId)) {
            return this.data[dataId];
        }
        if (this instanceof Layer) {
            return this.parent.lookup(dataId, dependOnExistence);
        }
        if (this.policies.rootTypenamesById[dataId]) {
            return Object.create(null);
        }
    };
    EntityStore.prototype.merge = function (older, newer) {
        var _this = this;
        var dataId;
        // Convert unexpected references to ID strings.
        if (isReference(older))
            older = older.__ref;
        if (isReference(newer))
            newer = newer.__ref;
        var existing = typeof older === "string" ? this.lookup((dataId = older)) : older;
        var incoming = typeof newer === "string" ? this.lookup((dataId = newer)) : newer;
        // If newer was a string ID, but that ID was not defined in this store,
        // then there are no fields to be merged, so we're done.
        if (!incoming)
            return;
        invariant(typeof dataId === "string", 1);
        var merged = new DeepMerger(storeObjectReconciler).merge(existing, incoming);
        // Even if merged === existing, existing may have come from a lower
        // layer, so we always need to set this.data[dataId] on this level.
        this.data[dataId] = merged;
        if (merged !== existing) {
            delete this.refs[dataId];
            if (this.group.caching) {
                var fieldsToDirty_1 = Object.create(null);
                // If we added a new StoreObject where there was previously none, dirty
                // anything that depended on the existence of this dataId, such as the
                // EntityStore#has method.
                if (!existing)
                    fieldsToDirty_1.__exists = 1;
                // Now invalidate dependents who called getFieldValue for any fields
                // that are changing as a result of this merge.
                Object.keys(incoming).forEach(function (storeFieldName) {
                    if (!existing ||
                        existing[storeFieldName] !== merged[storeFieldName]) {
                        // Always dirty the full storeFieldName, which may include
                        // serialized arguments following the fieldName prefix.
                        fieldsToDirty_1[storeFieldName] = 1;
                        // Also dirty fieldNameFromStoreName(storeFieldName) if it's
                        // different from storeFieldName and this field does not have
                        // keyArgs configured, because that means the cache can't make
                        // any assumptions about how field values with the same field
                        // name but different arguments might be interrelated, so it
                        // must err on the side of invalidating all field values that
                        // share the same short fieldName, regardless of arguments.
                        var fieldName = fieldNameFromStoreName(storeFieldName);
                        if (fieldName !== storeFieldName &&
                            !_this.policies.hasKeyArgs(merged.__typename, fieldName)) {
                            fieldsToDirty_1[fieldName] = 1;
                        }
                        // If merged[storeFieldName] has become undefined, and this is the
                        // Root layer, actually delete the property from the merged object,
                        // which is guaranteed to have been created fresh in this method.
                        if (merged[storeFieldName] === void 0 && !(_this instanceof Layer)) {
                            delete merged[storeFieldName];
                        }
                    }
                });
                if (fieldsToDirty_1.__typename &&
                    !(existing && existing.__typename) &&
                    // Since we return default root __typename strings
                    // automatically from store.get, we don't need to dirty the
                    // ROOT_QUERY.__typename field if merged.__typename is equal
                    // to the default string (usually "Query").
                    this.policies.rootTypenamesById[dataId] === merged.__typename) {
                    delete fieldsToDirty_1.__typename;
                }
                Object.keys(fieldsToDirty_1).forEach(function (fieldName) {
                    return _this.group.dirty(dataId, fieldName);
                });
            }
        }
    };
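    // Usage sketch (illustrative only): when a storeFieldName embeds arguments,
    // merge dirties both the full key and, if the field has no keyArgs configured,
    // the short field name. Assuming a hypothetical Query.posts field without keyArgs:
    //
    //   store.merge("ROOT_QUERY", { 'posts({"limit":10})': [{ __ref: "Post:1" }] });
    //   // dirties 'posts({"limit":10})' and also 'posts', so cached reads of
    //   // posts with different arguments are invalidated as well.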
    EntityStore.prototype.modify = function (dataId, fields) {
        var _this = this;
        var storeObject = this.lookup(dataId);
        if (storeObject) {
            var changedFields_1 = Object.create(null);
            var needToMerge_1 = false;
            var allDeleted_1 = true;
            var sharedDetails_1 = {
                DELETE: DELETE,
                INVALIDATE: INVALIDATE,
                isReference: isReference,
                toReference: this.toReference,
                canRead: this.canRead,
                readField: function (fieldNameOrOptions, from) {
                    return _this.policies.readField(typeof fieldNameOrOptions === "string" ?
                        {
                            fieldName: fieldNameOrOptions,
                            from: from || makeReference(dataId),
                        }
                        : fieldNameOrOptions, { store: _this });
                },
            };
            Object.keys(storeObject).forEach(function (storeFieldName) {
                var fieldName = fieldNameFromStoreName(storeFieldName);
                var fieldValue = storeObject[storeFieldName];
                if (fieldValue === void 0)
                    return;
                var modify = typeof fields === "function" ? fields : (fields[storeFieldName] || fields[fieldName]);
                if (modify) {
                    var newValue = modify === delModifier ? DELETE : (modify(maybeDeepFreeze(fieldValue), __assign(__assign({}, sharedDetails_1), { fieldName: fieldName, storeFieldName: storeFieldName, storage: _this.getStorage(dataId, storeFieldName) })));
                    if (newValue === INVALIDATE) {
                        _this.group.dirty(dataId, storeFieldName);
                    }
                    else {
                        if (newValue === DELETE)
                            newValue = void 0;
                        if (newValue !== fieldValue) {
                            changedFields_1[storeFieldName] = newValue;
                            needToMerge_1 = true;
                            fieldValue = newValue;
                            if (globalThis.__DEV__ !== false) {
                                var checkReference = function (ref) {
                                    if (_this.lookup(ref.__ref) === undefined) {
                                        globalThis.__DEV__ !== false && invariant.warn(2, ref);
                                        return true;
                                    }
                                };
                                if (isReference(newValue)) {
                                    checkReference(newValue);
                                }
                                else if (Array.isArray(newValue)) {
                                    // Warn about writing "mixed" arrays of Reference and non-Reference objects
                                    var seenReference = false;
                                    var someNonReference = void 0;
                                    for (var _i = 0, newValue_1 = newValue; _i < newValue_1.length; _i++) {
                                        var value = newValue_1[_i];
                                        if (isReference(value)) {
                                            seenReference = true;
                                            if (checkReference(value))
                                                break;
                                        }
                                        else {
                                            // Do not warn on primitive values, since those could never be represented
                                            // by a reference. This is a valid (albeit uncommon) use case.
                                            if (typeof value === "object" && !!value) {
                                                var id = _this.policies.identify(value)[0];
                                                // check if object could even be referenced, otherwise we are not interested in it for this warning
                                                if (id) {
                                                    someNonReference = value;
                                                }
                                            }
                                        }
                                        if (seenReference && someNonReference !== undefined) {
                                            globalThis.__DEV__ !== false && invariant.warn(3, someNonReference);
                                            break;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                if (fieldValue !== void 0) {
                    allDeleted_1 = false;
                }
            });
            if (needToMerge_1) {
                this.merge(dataId, changedFields_1);
                if (allDeleted_1) {
                    if (this instanceof Layer) {
                        this.data[dataId] = void 0;
                    }
                    else {
                        delete this.data[dataId];
                    }
                    this.group.dirty(dataId, "__exists");
                }
                return true;
            }
        }
        return false;
    };
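    // Usage sketch (illustrative only): each modifier receives the current field
    // value plus the details assembled in sharedDetails_1 above (DELETE, INVALIDATE,
    // isReference, toReference, canRead, readField, storage, fieldName, storeFieldName).
    // Assuming a hypothetical "Author:1" entity with a name field:
    //
    //   store.modify("Author:1", {
    //     name: function (existing, details) {
    //       // Returning details.DELETE removes the field; details.INVALIDATE only dirties it.
    //       return existing === "anonymous" ? details.DELETE : existing;
    //     },
    //   });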
    // If called with only one argument, removes the entire entity
    // identified by dataId. If called with a fieldName as well, removes all
    // fields of that entity whose names match fieldName according to the
    // fieldNameFromStoreName helper function. If called with a fieldName
    // and variables, removes all fields of that entity whose names match fieldName
    // and whose arguments when cached exactly match the variables passed.
    EntityStore.prototype.delete = function (dataId, fieldName, args) {
        var _a;
        var storeObject = this.lookup(dataId);
        if (storeObject) {
            var typename = this.getFieldValue(storeObject, "__typename");
            var storeFieldName = fieldName && args ?
                this.policies.getStoreFieldName({ typename: typename, fieldName: fieldName, args: args })
                : fieldName;
            return this.modify(dataId, storeFieldName ? (_a = {},
                _a[storeFieldName] = delModifier,
                _a) : delModifier);
        }
        return false;
    };
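    // Usage sketch (illustrative only) of the three call forms described above,
    // assuming a hypothetical "Post:1" entity with a comments field:
    //
    //   store.delete("Post:1");                            // remove the whole entity
    //   store.delete("Post:1", "comments");                // remove every comments(...) field
    //   store.delete("Post:1", "comments", { limit: 10 }); // remove only comments({"limit":10})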
    EntityStore.prototype.evict = function (options, limit) {
        var evicted = false;
        if (options.id) {
            if (hasOwn.call(this.data, options.id)) {
                evicted = this.delete(options.id, options.fieldName, options.args);
            }
            if (this instanceof Layer && this !== limit) {
                evicted = this.parent.evict(options, limit) || evicted;
            }
            // Always invalidate the field to trigger rereading of watched
            // queries, even if no cache data was modified by the eviction,
            // because queries may depend on computed fields with custom read
            // functions, whose values are not stored in the EntityStore.
            if (options.fieldName || evicted) {
                this.group.dirty(options.id, options.fieldName || "__exists");
            }
        }
        return evicted;
    };
    EntityStore.prototype.clear = function () {
        this.replace(null);
    };
    EntityStore.prototype.extract = function () {
        var _this = this;
        var obj = this.toObject();
        var extraRootIds = [];
        this.getRootIdSet().forEach(function (id) {
            if (!hasOwn.call(_this.policies.rootTypenamesById, id)) {
                extraRootIds.push(id);
            }
        });
        if (extraRootIds.length) {
            obj.__META = { extraRootIds: extraRootIds.sort() };
        }
        return obj;
    };
    EntityStore.prototype.replace = function (newData) {
        var _this = this;
        Object.keys(this.data).forEach(function (dataId) {
            if (!(newData && hasOwn.call(newData, dataId))) {
                _this.delete(dataId);
            }
        });
        if (newData) {
            var __META = newData.__META, rest_1 = __rest(newData, ["__META"]);
            Object.keys(rest_1).forEach(function (dataId) {
                _this.merge(dataId, rest_1[dataId]);
            });
            if (__META) {
                __META.extraRootIds.forEach(this.retain, this);
            }
        }
    };
    EntityStore.prototype.retain = function (rootId) {
        return (this.rootIds[rootId] = (this.rootIds[rootId] || 0) + 1);
    };
    EntityStore.prototype.release = function (rootId) {
        if (this.rootIds[rootId] > 0) {
            var count = --this.rootIds[rootId];
            if (!count)
                delete this.rootIds[rootId];
            return count;
        }
        return 0;
    };
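    // Usage sketch (illustrative only): retain and release adjust a per-ID
    // reference count, and any ID with a positive count survives gc (see below).
    // Assuming a hypothetical "User:1" entity:
    //
    //   store.retain("User:1");  // => 1
    //   store.retain("User:1");  // => 2
    //   store.release("User:1"); // => 1 (still retained)
    //   store.release("User:1"); // => 0 (rootIds entry removed)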
    // Return a Set<string> of all the ID strings that have been retained by
    // this layer/root *and* any layers/roots beneath it.
    EntityStore.prototype.getRootIdSet = function (ids) {
        if (ids === void 0) { ids = new Set(); }
        Object.keys(this.rootIds).forEach(ids.add, ids);
        if (this instanceof Layer) {
            this.parent.getRootIdSet(ids);
        }
        else {
            // Official singleton IDs like ROOT_QUERY and ROOT_MUTATION are
            // always considered roots for garbage collection, regardless of
            // their retainment counts in this.rootIds.
            Object.keys(this.policies.rootTypenamesById).forEach(ids.add, ids);
        }
        return ids;
    };
    // The goal of garbage collection is to remove IDs from the Root layer of the
    // store that are no longer reachable starting from any IDs that have been
    // explicitly retained (see retain and release, above). Returns an array of
    // dataId strings that were removed from the store.
    EntityStore.prototype.gc = function () {
        var _this = this;
        var ids = this.getRootIdSet();
        var snapshot = this.toObject();
        ids.forEach(function (id) {
            if (hasOwn.call(snapshot, id)) {
                // Because we are iterating over an ECMAScript Set, the IDs we add here
                // will be visited in later iterations of the forEach loop only if they
                // were not previously contained by the Set.
                Object.keys(_this.findChildRefIds(id)).forEach(ids.add, ids);
                // By removing IDs from the snapshot object here, we protect them from
                // getting removed from the root store layer below.
                delete snapshot[id];
            }
        });
        var idsToRemove = Object.keys(snapshot);
        if (idsToRemove.length) {
            var root_1 = this;
            while (root_1 instanceof Layer)
                root_1 = root_1.parent;
            idsToRemove.forEach(function (id) { return root_1.delete(id); });
        }
        return idsToRemove;
    };
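    // Usage sketch (illustrative only): gc marks everything reachable from the
    // root ID set and sweeps the rest from the Root layer. Assuming a hypothetical
    // "Post:1" entity that is not referenced by ROOT_QUERY or any retained ID:
    //
    //   store.retain("Post:1");
    //   store.gc();              // => [] ("Post:1" and its children are kept)
    //   store.release("Post:1");
    //   store.gc();              // => ["Post:1"] (removed from the Root layer)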
    EntityStore.prototype.findChildRefIds = function (dataId) {
        if (!hasOwn.call(this.refs, dataId)) {
            var found_1 = (this.refs[dataId] = Object.create(null));
            var root = this.data[dataId];
            if (!root)
                return found_1;
            var workSet_1 = new Set([root]);
            // Within the store, only arrays and objects can contain child entity
            // references, so we can prune the traversal using this predicate:
            workSet_1.forEach(function (obj) {
                if (isReference(obj)) {
                    found_1[obj.__ref] = true;
                    // In rare cases, a { __ref } Reference object may have other fields.
                    // This often indicates a mismerging of References with StoreObjects,
                    // but garbage collection should not be fooled by a stray __ref
                    // property in a StoreObject (ignoring all the other fields just
                    // because the StoreObject looks like a Reference). To avoid this
                    // premature termination of findChildRefIds recursion, we fall through
                    // to the code below, which will handle any other properties of obj.
                }
                if (isNonNullObject(obj)) {
                    Object.keys(obj).forEach(function (key) {
                        var child = obj[key];
                        // No need to add primitive values to the workSet, since they cannot
                        // contain reference objects.
                        if (isNonNullObject(child)) {
                            workSet_1.add(child);
                        }
                    });
                }
            });
        }
        return this.refs[dataId];
    };
    EntityStore.prototype.makeCacheKey = function () {
        return this.group.keyMaker.lookupArray(arguments);
    };
    return EntityStore;
}());
export { EntityStore };
// A single CacheGroup represents a set of one or more EntityStore objects,
// typically the Root store in a CacheGroup by itself, and all active Layer
// stores in a group together. A single EntityStore object belongs to only
// one CacheGroup, store.group. The CacheGroup is responsible for tracking
// dependencies, so store.group is helpful for generating unique keys for
// cached results that need to be invalidated when/if those dependencies
// change. If we used the EntityStore objects themselves as cache keys (that
// is, store rather than store.group), the cache would become unnecessarily
// fragmented by all the different Layer objects. Instead, the CacheGroup
// approach allows all optimistic Layer objects in the same linked list to
// belong to one CacheGroup, with the non-optimistic Root object belonging
// to another CacheGroup, allowing resultCaching dependencies to be tracked
// separately for optimistic and non-optimistic entity data.
var CacheGroup = /** @class */ (function () {
    function CacheGroup(caching, parent) {
        if (parent === void 0) { parent = null; }
        this.caching = caching;
        this.parent = parent;
        this.d = null;
        this.resetCaching();
    }
    CacheGroup.prototype.resetCaching = function () {
        this.d = this.caching ? dep() : null;
        this.keyMaker = new Trie(canUseWeakMap);
    };
    CacheGroup.prototype.depend = function (dataId, storeFieldName) {
        if (this.d) {
            this.d(makeDepKey(dataId, storeFieldName));
            var fieldName = fieldNameFromStoreName(storeFieldName);
            if (fieldName !== storeFieldName) {
                // Fields with arguments that contribute extra identifying
                // information to the fieldName (thus forming the storeFieldName)
                // depend not only on the full storeFieldName but also on the
                // short fieldName, so the field can be invalidated using either
                // level of specificity.
                this.d(makeDepKey(dataId, fieldName));
            }
            if (this.parent) {
                this.parent.depend(dataId, storeFieldName);
            }
        }
    };
    CacheGroup.prototype.dirty = function (dataId, storeFieldName) {
        if (this.d) {
            this.d.dirty(makeDepKey(dataId, storeFieldName),
            // When storeFieldName === "__exists", that means the entity identified
            // by dataId has either disappeared from the cache or was newly added,
            // so the result caching system would do well to "forget everything it
            // knows" about that object. To achieve that kind of invalidation, we
            // not only dirty the associated result cache entry, but also remove it
            // completely from the dependency graph. For the optimism implementation
            // details, see https://github.com/benjamn/optimism/pull/195.
            storeFieldName === "__exists" ? "forget" : "setDirty");
        }
    };
    return CacheGroup;
}());
function makeDepKey(dataId, storeFieldName) {
    // Since field names cannot have '#' characters in them, this method
    // of joining the field name and the ID should be unambiguous, and much
    // cheaper than JSON.stringify([dataId, fieldName]).
    return storeFieldName + "#" + dataId;
}
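// Examples of the resulting key format (illustrative only):
//
//   makeDepKey("ROOT_QUERY", 'posts({"limit":10})') // => 'posts({"limit":10})#ROOT_QUERY'
//   makeDepKey("User:1", "__exists")                // => "__exists#User:1"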
export function maybeDependOnExistenceOfEntity(store, entityId) {
    if (supportsResultCaching(store)) {
        // We use this pseudo-field __exists elsewhere in the EntityStore code to
        // represent changes in the existence of the entity object identified by
        // entityId. This dependency gets reliably dirtied whenever an object with
        // this ID is deleted (or newly created) within this group, so any result
        // cache entries (for example, StoreReader#executeSelectionSet results) that
        // depend on __exists for this entityId will get dirtied as well, leading to
        // the eventual recomputation (instead of reuse) of those result objects the
        // next time someone reads them from the cache.
        store.group.depend(entityId, "__exists");
    }
}
(function (EntityStore) {
    // Refer to this class as EntityStore.Root outside this namespace.
    var Root = /** @class */ (function (_super) {
        __extends(Root, _super);
        function Root(_a) {
            var policies = _a.policies, _b = _a.resultCaching, resultCaching = _b === void 0 ? true : _b, seed = _a.seed;
            var _this = _super.call(this, policies, new CacheGroup(resultCaching)) || this;
            _this.stump = new Stump(_this);
            _this.storageTrie = new Trie(canUseWeakMap);
            if (seed)
                _this.replace(seed);
            return _this;
        }
        Root.prototype.addLayer = function (layerId, replay) {
            // Adding an optimistic Layer on top of the Root actually adds the Layer
            // on top of the Stump, so the Stump always comes between the Root and
            // any Layer objects that we've added.
            return this.stump.addLayer(layerId, replay);
        };
        Root.prototype.removeLayer = function () {
            // Never remove the root layer.
            return this;
        };
        Root.prototype.getStorage = function () {
            return this.storageTrie.lookupArray(arguments);
        };
        return Root;
    }(EntityStore));
    EntityStore.Root = Root;
})(EntityStore || (EntityStore = {}));
// Not exported, since all Layer instances are created by the addLayer method
// of the EntityStore.Root class.
var Layer = /** @class */ (function (_super) {
    __extends(Layer, _super);
    function Layer(id, parent, replay, group) {
        var _this = _super.call(this, parent.policies, group) || this;
        _this.id = id;
        _this.parent = parent;
        _this.replay = replay;
        _this.group = group;
        replay(_this);
        return _this;
    }
    Layer.prototype.addLayer = function (layerId, replay) {
        return new Layer(layerId, this, replay, this.group);
    };
    Layer.prototype.removeLayer = function (layerId) {
        var _this = this;
        // Remove all instances of the given id, not just the first one.
        var parent = this.parent.removeLayer(layerId);
        if (layerId === this.id) {
            if (this.group.caching) {
                // Dirty every ID we're removing. Technically we might be able to avoid
                // dirtying fields that have values in higher layers, but we don't have
                // easy access to higher layers here, and we're about to recreate those
                // layers anyway (see parent.addLayer below).
                Object.keys(this.data).forEach(function (dataId) {
                    var ownStoreObject = _this.data[dataId];
                    var parentStoreObject = parent["lookup"](dataId);
                    if (!parentStoreObject) {
                        // The StoreObject identified by dataId was defined in this layer
                        // but will be undefined in the parent layer, so we can delete the
                        // whole entity using this.delete(dataId). Since we're about to
                        // throw this layer away, the only goal of this deletion is to dirty
                        // the removed fields.
                        _this.delete(dataId);
                    }
                    else if (!ownStoreObject) {
                        // This layer had an entry for dataId but it was undefined, which
                        // means the entity was deleted in this layer, and it's about to
                        // become undeleted when we remove this layer, so we need to dirty
                        // all fields that are about to be reexposed.
                        _this.group.dirty(dataId, "__exists");
                        Object.keys(parentStoreObject).forEach(function (storeFieldName) {
                            _this.group.dirty(dataId, storeFieldName);
                        });
                    }
                    else if (ownStoreObject !== parentStoreObject) {
                        // If ownStoreObject is not exactly the same as parentStoreObject,
                        // dirty any fields whose values will change as a result of this
                        // removal.
                        Object.keys(ownStoreObject).forEach(function (storeFieldName) {
                            if (!equal(ownStoreObject[storeFieldName], parentStoreObject[storeFieldName])) {
                                _this.group.dirty(dataId, storeFieldName);
                            }
                        });
                    }
                });
            }
            return parent;
        }
        // No changes are necessary if the parent chain remains identical.
        if (parent === this.parent)
            return this;
        // Recreate this layer on top of the new parent.
        return parent.addLayer(this.id, this.replay);
    };
    Layer.prototype.toObject = function () {
        return __assign(__assign({}, this.parent.toObject()), this.data);
    };
    Layer.prototype.findChildRefIds = function (dataId) {
        var fromParent = this.parent.findChildRefIds(dataId);
        return hasOwn.call(this.data, dataId) ? __assign(__assign({}, fromParent), _super.prototype.findChildRefIds.call(this, dataId)) : fromParent;
    };
    Layer.prototype.getStorage = function () {
        var p = this.parent;
        while (p.parent)
            p = p.parent;
        return p.getStorage.apply(p,
        // @ts-expect-error
        arguments);
    };
    return Layer;
}(EntityStore));
// Represents a Layer permanently installed just above the Root, which allows
// reading optimistically (and registering optimistic dependencies) even when
// no optimistic layers are currently active. The stump.group CacheGroup object
// is shared by any/all Layer objects added on top of the Stump.
var Stump = /** @class */ (function (_super) {
    __extends(Stump, _super);
    function Stump(root) {
        return _super.call(this, "EntityStore.Stump", root, function () { }, new CacheGroup(root.group.caching, root.group)) || this;
    }
    Stump.prototype.removeLayer = function () {
        // Never remove the Stump layer.
        return this;
    };
    Stump.prototype.merge = function (older, newer) {
        // We never want to write any data into the Stump, so we forward any merge
        // calls to the Root instead. Another option here would be to throw an
        // exception, but the toReference(object, true) function can sometimes
        // trigger Stump writes (which used to be Root writes, before the Stump
        // concept was introduced).
        return this.parent.merge(older, newer);
    };
    return Stump;
}(Layer));
function storeObjectReconciler(existingObject, incomingObject, property) {
    var existingValue = existingObject[property];
    var incomingValue = incomingObject[property];
    // Wherever there is a key collision, prefer the incoming value, unless
    // it is deeply equal to the existing value. It's worth checking deep
    // equality here (even though blindly returning incoming would be
    // logically correct) because preserving the referential identity of
    // existing data can prevent needless rereading and rerendering.
    return equal(existingValue, incomingValue) ? existingValue : incomingValue;
}
export function supportsResultCaching(store) {
    // When result caching is disabled, store.depend will be null.
    return !!(store instanceof EntityStore && store.group.caching);
}
//# sourceMappingURL=entityStore.js.map