1 | ;
|
2 |
|
3 | var _create = require("babel-runtime/core-js/object/create");
|
4 |
|
5 | var _create2 = _interopRequireDefault(_create);
|
6 |
|
7 | var _keys = require("babel-runtime/core-js/object/keys");
|
8 |
|
9 | var _keys2 = _interopRequireDefault(_keys);
|
10 |
|
11 | var _classCallCheck2 = require("babel-runtime/helpers/classCallCheck");
|
12 |
|
13 | var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);
|
14 |
|
15 | var _createClass2 = require("babel-runtime/helpers/createClass");
|
16 |
|
17 | var _createClass3 = _interopRequireDefault(_createClass2);
|
18 |
|
19 | var _utils = require("./utils");
|
20 |
|
21 | var _utils2 = _interopRequireDefault(_utils);
|
22 |
|
/**
 * Normalise a require()d module so that `.default` always yields the module's
 * default export, whether the module was compiled from an ES module or not.
 * @param {*} obj the value returned by require()
 * @return {Object} `obj` itself when it is a transpiled ES module, otherwise
 *     a wrapper whose `default` property is `obj`.
 */
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
|
24 |
|
25 | /**
|
26 | * The purpose of this class is to accumulate /sync responses such that a
|
27 | * complete "initial" JSON response can be returned which accurately represents
|
28 | * the sum total of the /sync responses accumulated to date. It only handles
|
29 | * room data: that is, everything under the "rooms" top-level key.
|
30 | *
|
31 | * This class is used when persisting room data so a complete /sync response can
|
32 | * be loaded from disk and incremental syncs can be performed on the server,
|
33 | * rather than asking the server to do an initial sync on startup.
|
34 | */
|
35 | var SyncAccumulator = function () {
|
36 |
|
37 | /**
|
38 | * @param {Object} opts
|
39 | * @param {Number=} opts.maxTimelineEntries The ideal maximum number of
|
40 | * timeline entries to keep in the sync response. This is best-effort, as
|
41 | * clients do not always have a back-pagination token for each event, so
|
42 | * it's possible there may be slightly *less* than this value. There will
|
43 | * never be more. This cannot be 0 or else it makes it impossible to scroll
|
44 | * back in a room. Default: 50.
|
45 | */
|
/**
 * @param {Object} opts
 * @param {Number=} opts.maxTimelineEntries The ideal maximum number of
 * timeline entries to keep in the sync response. This is best-effort, as
 * clients do not always have a back-pagination token for each event, so
 * it's possible there may be slightly *less* than this value. There will
 * never be more. This cannot be 0 or else it makes it impossible to scroll
 * back in a room. Default: 50.
 */
function SyncAccumulator(opts) {
  (0, _classCallCheck3.default)(this, SyncAccumulator);

  // Shallow-copy the supplied options so applying the default below never
  // mutates the caller's object.
  opts = opts || {};
  var copiedOpts = {};
  for (var key in opts) {
    if (Object.prototype.hasOwnProperty.call(opts, key)) {
      copiedOpts[key] = opts[key];
    }
  }
  // NB: `||` (not a nullish check) is deliberate: 0 is documented as an
  // invalid value, so a falsy entry count falls back to the default.
  copiedOpts.maxTimelineEntries = copiedOpts.maxTimelineEntries || 50;
  this.opts = copiedOpts;

  this.accountData = {
    //$event_type: Object
  };
  this.inviteRooms = {
    //$roomId: { ... sync 'invite' json data ... }
  };
  this.joinRooms = {
    //$roomId: {
    //  _currentState: { $event_type: { $state_key: json } },
    //  _timeline: [
    //     { event: $event, token: null|token },
    //     { event: $event, token: null|token },
    //     ...
    //  ],
    //  _summary: {
    //     m.heroes: [ $user_id ],
    //     m.joined_member_count: $count,
    //     m.invited_member_count: $count
    //  },
    //  _accountData: { $event_type: json },
    //  _unreadNotifications: { ... unread_notifications JSON ... },
    //  _readReceipts: { $user_id: { data: $json, eventId: $event_id }}
    //}
  };
  // the /sync token which corresponds to the last time rooms were
  // accumulated. We remember this so that any caller can obtain a
  // coherent /sync response and know at what point they should be
  // streaming from without losing events.
  this.nextBatch = null;

  // { ('invite'|'join'|'leave'): $groupId: { ... sync 'group' data } }
  this.groups = {
    invite: {},
    join: {},
    leave: {}
  };
}
|
90 |
|
(0, _createClass3.default)(SyncAccumulator, [{
    key: "accumulate",

    /**
     * Accumulate an incremental /sync response into this accumulator.
     * @param {Object} syncResponse the complete /sync JSON
     */
    value: function accumulate(syncResponse) {
      this._accumulateRooms(syncResponse);
      this._accumulateGroups(syncResponse);
      this._accumulateAccountData(syncResponse);
      // Remember where in the /sync stream we are so callers can resume
      // from this point via getNextBatchToken()/getJSON().
      this.nextBatch = syncResponse.next_batch;
    }
|
  }, {
    key: "_accumulateAccountData",

    /**
     * Accumulate global account_data events from a /sync response.
     * Events are clobbered based on their event type.
     * @param {Object} syncResponse the complete /sync JSON
     */
    value: function _accumulateAccountData(syncResponse) {
      var _this = this;

      if (!syncResponse.account_data || !syncResponse.account_data.events) {
        return; // nothing new this sync
      }
      // Clobbers based on event type.
      syncResponse.account_data.events.forEach(function (e) {
        _this.accountData[e.type] = e;
      });
    }

    /**
     * Accumulate incremental /sync room data.
     * @param {Object} syncResponse the complete /sync JSON
     */
|
118 | }, {
|
119 | key: "_accumulateRooms",
|
120 | value: function _accumulateRooms(syncResponse) {
|
121 | var _this2 = this;
|
122 |
|
123 | if (!syncResponse.rooms) {
|
124 | return;
|
125 | }
|
126 | if (syncResponse.rooms.invite) {
|
127 | (0, _keys2.default)(syncResponse.rooms.invite).forEach(function (roomId) {
|
128 | _this2._accumulateRoom(roomId, "invite", syncResponse.rooms.invite[roomId]);
|
129 | });
|
130 | }
|
131 | if (syncResponse.rooms.join) {
|
132 | (0, _keys2.default)(syncResponse.rooms.join).forEach(function (roomId) {
|
133 | _this2._accumulateRoom(roomId, "join", syncResponse.rooms.join[roomId]);
|
134 | });
|
135 | }
|
136 | if (syncResponse.rooms.leave) {
|
137 | (0, _keys2.default)(syncResponse.rooms.leave).forEach(function (roomId) {
|
138 | _this2._accumulateRoom(roomId, "leave", syncResponse.rooms.leave[roomId]);
|
139 | });
|
140 | }
|
141 | }
|
142 | }, {
|
143 | key: "_accumulateRoom",
|
144 | value: function _accumulateRoom(roomId, category, data) {
|
145 | // Valid /sync state transitions
|
146 | // +--------+ <======+ 1: Accept an invite
|
147 | // +== | INVITE | | (5) 2: Leave a room
|
148 | // | +--------+ =====+ | 3: Join a public room previously
|
149 | // |(1) (4) | | left (handle as if new room)
|
150 | // V (2) V | 4: Reject an invite
|
151 | // +------+ ========> +--------+ 5: Invite to a room previously
|
152 | // | JOIN | (3) | LEAVE* | left (handle as if new room)
|
153 | // +------+ <======== +--------+
|
154 | //
|
155 | // * equivalent to "no state"
|
156 | switch (category) {
|
157 | case "invite":
|
158 | // (5)
|
159 | this._accumulateInviteState(roomId, data);
|
160 | break;
|
161 | case "join":
|
162 | if (this.inviteRooms[roomId]) {
|
163 | // (1)
|
164 | // was previously invite, now join. We expect /sync to give
|
165 | // the entire state and timeline on 'join', so delete previous
|
166 | // invite state
|
167 | delete this.inviteRooms[roomId];
|
168 | }
|
169 | // (3)
|
170 | this._accumulateJoinState(roomId, data);
|
171 | break;
|
172 | case "leave":
|
173 | if (this.inviteRooms[roomId]) {
|
174 | // (4)
|
175 | delete this.inviteRooms[roomId];
|
176 | } else {
|
177 | // (2)
|
178 | delete this.joinRooms[roomId];
|
179 | }
|
180 | break;
|
181 | default:
|
182 | console.error("Unknown cateogory: ", category);
|
183 | }
|
184 | }
|
185 | }, {
|
186 | key: "_accumulateInviteState",
|
187 | value: function _accumulateInviteState(roomId, data) {
|
188 | if (!data.invite_state || !data.invite_state.events) {
|
189 | // no new data
|
190 | return;
|
191 | }
|
192 | if (!this.inviteRooms[roomId]) {
|
193 | this.inviteRooms[roomId] = {
|
194 | invite_state: data.invite_state
|
195 | };
|
196 | return;
|
197 | }
|
198 | // accumulate extra keys for invite->invite transitions
|
199 | // clobber based on event type / state key
|
200 | // We expect invite_state to be small, so just loop over the events
|
201 | var currentData = this.inviteRooms[roomId];
|
202 | data.invite_state.events.forEach(function (e) {
|
203 | var hasAdded = false;
|
204 | for (var i = 0; i < currentData.invite_state.events.length; i++) {
|
205 | var current = currentData.invite_state.events[i];
|
206 | if (current.type === e.type && current.state_key == e.state_key) {
|
207 | currentData.invite_state.events[i] = e; // update
|
208 | hasAdded = true;
|
209 | }
|
210 | }
|
211 | if (!hasAdded) {
|
212 | currentData.invite_state.events.push(e);
|
213 | }
|
214 | });
|
215 | }
|
216 |
|
217 | // Accumulate timeline and state events in a room.
|
218 |
|
  }, {
    key: "_accumulateJoinState",

    /**
     * Accumulate timeline, state, ephemeral, account_data, summary and
     * notification data for a room in the 'join' category.
     * @param {string} roomId
     * @param {Object} data the room's /sync JSON under rooms.join[roomId]
     */
    value: function _accumulateJoinState(roomId, data) {
      // We expect this function to be called a lot (every /sync) so we want
      // this to be fast. /sync stores events in an array but we often want
      // to clobber based on type/state_key. Rather than convert arrays to
      // maps all the time, just keep private maps which contain
      // the actual current accumulated sync state, and array-ify it when
      // getJSON() is called.

      // State resolution:
      // The 'state' key is the delta from the previous sync (or start of time
      // if no token was supplied), to the START of the timeline. To obtain
      // the current state, we need to "roll forward" state by reading the
      // timeline. We want to store the current state so we can drop events
      // out the end of the timeline based on opts.maxTimelineEntries.
      //
      //      'state'      'timeline'     current state
      //  |-------x<======================>x
      //               T I M E
      //
      // When getJSON() is called, we 'roll back' the current state by the
      // number of entries in the timeline to work out what 'state' should be.

      // Back-pagination:
      // On an initial /sync, the server provides a back-pagination token for
      // the start of the timeline. When /sync deltas come down, they also
      // include back-pagination tokens for the start of the timeline. This
      // means not all events in the timeline have back-pagination tokens, as
      // it is only the ones at the START of the timeline which have them.
      // In order for us to have a valid timeline (and back-pagination token
      // to match), we need to make sure that when we remove old timeline
      // events, that we roll forward to an event which has a back-pagination
      // token. This means we can't keep a strict sliding-window based on
      // opts.maxTimelineEntries, and we may have a few less. We should never
      // have more though, provided that the /sync limit is less than or equal
      // to opts.maxTimelineEntries.

      if (!this.joinRooms[roomId]) {
        // Create truly empty objects so event types of 'hasOwnProperty' and co
        // don't cause this code to break.
        this.joinRooms[roomId] = {
          _currentState: (0, _create2.default)(null),
          _timeline: [],
          _accountData: (0, _create2.default)(null),
          _unreadNotifications: {},
          _summary: {},
          _readReceipts: {}
        };
      }
      var currentData = this.joinRooms[roomId];

      if (data.account_data && data.account_data.events) {
        // clobber based on type
        data.account_data.events.forEach(function (e) {
          currentData._accountData[e.type] = e;
        });
      }

      // these probably clobber, spec is unclear.
      if (data.unread_notifications) {
        currentData._unreadNotifications = data.unread_notifications;
      }
      if (data.summary) {
        var HEROES_KEY = "m.heroes";
        var INVITED_COUNT_KEY = "m.invited_member_count";
        var JOINED_COUNT_KEY = "m.joined_member_count";

        var acc = currentData._summary;
        var sum = data.summary;
        // only overwrite a summary field when the server sent a value for it;
        // omitted fields keep their previously accumulated value.
        acc[HEROES_KEY] = sum[HEROES_KEY] || acc[HEROES_KEY];
        acc[JOINED_COUNT_KEY] = sum[JOINED_COUNT_KEY] || acc[JOINED_COUNT_KEY];
        acc[INVITED_COUNT_KEY] = sum[INVITED_COUNT_KEY] || acc[INVITED_COUNT_KEY];
      }

      if (data.ephemeral && data.ephemeral.events) {
        data.ephemeral.events.forEach(function (e) {
          // We purposefully do not persist m.typing events.
          // Technically you could refresh a browser before the timer on a
          // typing event is up, so it'll look like you aren't typing when
          // you really still are. However, the alternative is worse. If
          // we do persist typing events, it will look like people are
          // typing forever until someone really does start typing (which
          // will prompt Synapse to send down an actual m.typing event to
          // clobber the one we persisted).
          if (e.type !== "m.receipt" || !e.content) {
            // This means we'll drop unknown ephemeral events but that
            // seems okay.
            return;
          }
          // Handle m.receipt events. They clobber based on:
          //   (user_id, receipt_type)
          // but they are keyed in the event as:
          //   content:{ $event_id: { $receipt_type: { $user_id: {json} }}}
          // so store them in the former so we can accumulate receipt deltas
          // quickly and efficiently (we expect a lot of them). Fold the
          // receipt type into the key name since we only have 1 at the
          // moment (m.read) and nested JSON objects are slower and more
          // of a hassle to work with. We'll inflate this back out when
          // getJSON() is called.
          (0, _keys2.default)(e.content).forEach(function (eventId) {
            if (!e.content[eventId]["m.read"]) {
              return;
            }
            (0, _keys2.default)(e.content[eventId]["m.read"]).forEach(function (userId) {
              // clobber on user ID
              currentData._readReceipts[userId] = {
                data: e.content[eventId]["m.read"][userId],
                eventId: eventId
              };
            });
          });
        });
      }

      // if we got a limited sync, we need to remove all timeline entries or else
      // we will have gaps in the timeline.
      if (data.timeline && data.timeline.limited) {
        currentData._timeline = [];
      }

      // Work out the current state. The deltas need to be applied in the order:
      // - existing state which didn't come down /sync.
      // - State events under the 'state' key.
      // - State events in the 'timeline'.
      if (data.state && data.state.events) {
        data.state.events.forEach(function (e) {
          setState(currentData._currentState, e);
        });
      }
      if (data.timeline && data.timeline.events) {
        data.timeline.events.forEach(function (e, index) {
          // this nops if 'e' isn't a state event
          setState(currentData._currentState, e);
          // append the event to the timeline. The back-pagination token
          // corresponds to the first event in the timeline
          currentData._timeline.push({
            event: e,
            token: index === 0 ? data.timeline.prev_batch : null
          });
        });
      }

      // attempt to prune the timeline by jumping between events which have
      // pagination tokens.
      if (currentData._timeline.length > this.opts.maxTimelineEntries) {
        var startIndex = currentData._timeline.length - this.opts.maxTimelineEntries;
        for (var i = startIndex; i < currentData._timeline.length; i++) {
          if (currentData._timeline[i].token) {
            // keep all events after this, including this one
            currentData._timeline = currentData._timeline.slice(i, currentData._timeline.length);
            break;
          }
        }
      }
    }

    /**
     * Accumulate incremental /sync group data.
     * @param {Object} syncResponse the complete /sync JSON
     */
|
381 | }, {
|
382 | key: "_accumulateGroups",
|
383 | value: function _accumulateGroups(syncResponse) {
|
384 | var _this3 = this;
|
385 |
|
386 | if (!syncResponse.groups) {
|
387 | return;
|
388 | }
|
389 | if (syncResponse.groups.invite) {
|
390 | (0, _keys2.default)(syncResponse.groups.invite).forEach(function (groupId) {
|
391 | _this3._accumulateGroup(groupId, "invite", syncResponse.groups.invite[groupId]);
|
392 | });
|
393 | }
|
394 | if (syncResponse.groups.join) {
|
395 | (0, _keys2.default)(syncResponse.groups.join).forEach(function (groupId) {
|
396 | _this3._accumulateGroup(groupId, "join", syncResponse.groups.join[groupId]);
|
397 | });
|
398 | }
|
399 | if (syncResponse.groups.leave) {
|
400 | (0, _keys2.default)(syncResponse.groups.leave).forEach(function (groupId) {
|
401 | _this3._accumulateGroup(groupId, "leave", syncResponse.groups.leave[groupId]);
|
402 | });
|
403 | }
|
404 | }
|
405 | }, {
|
406 | key: "_accumulateGroup",
|
407 | value: function _accumulateGroup(groupId, category, data) {
|
408 | var _arr = ['invite', 'join', 'leave'];
|
409 |
|
410 | for (var _i = 0; _i < _arr.length; _i++) {
|
411 | var cat = _arr[_i];
|
412 | delete this.groups[cat][groupId];
|
413 | }
|
414 | this.groups[category][groupId] = data;
|
415 | }
|
416 |
|
417 | /**
|
418 | * Return everything under the 'rooms' key from a /sync response which
|
419 | * represents all room data that should be stored. This should be paired
|
420 | * with the sync token which represents the most recent /sync response
|
421 | * provided to accumulate().
|
422 | * @return {Object} An object with a "nextBatch", "roomsData" and "accountData"
|
423 | * keys.
|
424 | * The "nextBatch" key is a string which represents at what point in the
|
425 | * /sync stream the accumulator reached. This token should be used when
|
426 | * restarting a /sync stream at startup. Failure to do so can lead to missing
|
427 | * events. The "roomsData" key is an Object which represents the entire
|
428 | * /sync response from the 'rooms' key onwards. The "accountData" key is
|
429 | * a list of raw events which represent global account data.
|
430 | */
|
431 |
|
  }, {
    key: "getJSON",
    value: function getJSON() {
      var _this4 = this;

      var data = {
        join: {},
        invite: {},
        // always empty. This is set by /sync when a room was previously
        // in 'invite' or 'join'. On fresh startup, the client won't know
        // about any previous room being in 'invite' or 'join' so we can
        // just omit mentioning it at all, even if it has previously come
        // down /sync.
        // The notable exception is when a client is kicked or banned:
        // we may want to hold onto that room so the client can clearly see
        // why their room has disappeared. We don't persist it though because
        // it is unclear *when* we can safely remove the room from the DB.
        // Instead, we assume that if you're loading from the DB, you've
        // refreshed the page, which means you've seen the kick/ban already.
        leave: {}
      };
      // Invite rooms are stored verbatim; emit them as-is.
      (0, _keys2.default)(this.inviteRooms).forEach(function (roomId) {
        data.invite[roomId] = _this4.inviteRooms[roomId];
      });
      // Join rooms are stored in private maps; inflate them back into the
      // array-based /sync wire format.
      (0, _keys2.default)(this.joinRooms).forEach(function (roomId) {
        var roomData = _this4.joinRooms[roomId];
        var roomJson = {
          ephemeral: { events: [] },
          account_data: { events: [] },
          state: { events: [] },
          timeline: {
            events: [],
            prev_batch: null
          },
          unread_notifications: roomData._unreadNotifications,
          summary: roomData._summary
        };
        // Add account data
        (0, _keys2.default)(roomData._accountData).forEach(function (evType) {
          roomJson.account_data.events.push(roomData._accountData[evType]);
        });

        // Add receipt data: inflate the flat { user_id: {data, eventId} } map
        // back into a single synthetic m.receipt ephemeral event.
        var receiptEvent = {
          type: "m.receipt",
          room_id: roomId,
          content: {
            // $event_id: { "m.read": { $user_id: $json } }
          }
        };
        (0, _keys2.default)(roomData._readReceipts).forEach(function (userId) {
          var receiptData = roomData._readReceipts[userId];
          if (!receiptEvent.content[receiptData.eventId]) {
            receiptEvent.content[receiptData.eventId] = {
              "m.read": {}
            };
          }
          receiptEvent.content[receiptData.eventId]["m.read"][userId] = receiptData.data;
        });
        // add only if we have some receipt data
        if ((0, _keys2.default)(receiptEvent.content).length > 0) {
          roomJson.ephemeral.events.push(receiptEvent);
        }

        // Add timeline data
        roomData._timeline.forEach(function (msgData) {
          if (!roomJson.timeline.prev_batch) {
            // the first event we add to the timeline MUST match up to
            // the prev_batch token.
            if (!msgData.token) {
              return; // this shouldn't happen as we prune constantly.
            }
            roomJson.timeline.prev_batch = msgData.token;
          }
          roomJson.timeline.events.push(msgData.event);
        });

        // Add state data: roll back current state to the start of timeline,
        // by "reverse clobbering" from the end of the timeline to the start.
        // Convert maps back into arrays.
        var rollBackState = (0, _create2.default)(null);
        for (var i = roomJson.timeline.events.length - 1; i >= 0; i--) {
          var timelineEvent = roomJson.timeline.events[i];
          if (timelineEvent.state_key === null || timelineEvent.state_key === undefined) {
            continue; // not a state event
          }
          // since we're going back in time, we need to use the previous
          // state value else we'll break causality. We don't have the
          // complete previous state event, so we need to create one.
          var prevStateEvent = _utils2.default.deepCopy(timelineEvent);
          if (prevStateEvent.unsigned) {
            if (prevStateEvent.unsigned.prev_content) {
              prevStateEvent.content = prevStateEvent.unsigned.prev_content;
            }
            if (prevStateEvent.unsigned.prev_sender) {
              prevStateEvent.sender = prevStateEvent.unsigned.prev_sender;
            }
          }
          setState(rollBackState, prevStateEvent);
        }
        (0, _keys2.default)(roomData._currentState).forEach(function (evType) {
          (0, _keys2.default)(roomData._currentState[evType]).forEach(function (stateKey) {
            var ev = roomData._currentState[evType][stateKey];
            if (rollBackState[evType] && rollBackState[evType][stateKey]) {
              // use the reverse clobbered event instead.
              ev = rollBackState[evType][stateKey];
            }
            roomJson.state.events.push(ev);
          });
        });
        data.join[roomId] = roomJson;
      });

      // Add account data
      var accData = [];
      (0, _keys2.default)(this.accountData).forEach(function (evType) {
        accData.push(_this4.accountData[evType]);
      });

      return {
        nextBatch: this.nextBatch,
        roomsData: data,
        groupsData: this.groups,
        accountData: accData
      };
    }
|
  }, {
    key: "getNextBatchToken",

    /**
     * Return the next_batch token most recently seen by accumulate(), or
     * null if nothing has been accumulated yet. Use this token to resume
     * the /sync stream without losing events.
     * @return {?string} the /sync stream token
     */
    value: function getNextBatchToken() {
      return this.nextBatch;
    }
  }]);
  return SyncAccumulator;
}(); /*
|
566 | Copyright 2017 Vector Creations Ltd
|
567 | Copyright 2018 New Vector Ltd
|
568 |
|
569 | Licensed under the Apache License, Version 2.0 (the "License");
|
570 | you may not use this file except in compliance with the License.
|
571 | You may obtain a copy of the License at
|
572 |
|
573 | http://www.apache.org/licenses/LICENSE-2.0
|
574 |
|
575 | Unless required by applicable law or agreed to in writing, software
|
576 | distributed under the License is distributed on an "AS IS" BASIS,
|
577 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
578 | See the License for the specific language governing permissions and
|
579 | limitations under the License.
|
580 | */
|
581 |
|
582 | /**
|
583 | * This is an internal module. See {@link SyncAccumulator} for the public class.
|
584 | * @module sync-accumulator
|
585 | */
|
586 |
|
/**
 * File a state event into a { $event_type: { $state_key: event } } map,
 * clobbering any previous event with the same (type, state_key) pair.
 * Events without a state_key or type are ignored: they are not state events.
 * @param {Object} eventMap the map to mutate
 * @param {Object} event the /sync event to file
 */
function setState(eventMap, event) {
  // `== null` matches both null and undefined state_keys in one check.
  if (event.state_key == null || !event.type) {
    return;
  }
  var byStateKey = eventMap[event.type];
  if (!byStateKey) {
    // Prototype-less object so event types like 'hasOwnProperty' are safe.
    byStateKey = (0, _create2.default)(null);
    eventMap[event.type] = byStateKey;
  }
  byStateKey[event.state_key] = event;
}
|
596 |
|
// The accumulator class is this module's sole export.
module.exports = SyncAccumulator;
//# sourceMappingURL=sync-accumulator.js.map
\ | No newline at end of file |