/*
Copyright 2017 Vector Creations Ltd

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

"use strict";
import 'source-map-support/register';
import utils from "../test-utils";
import sdk from "../..";
import expect from 'expect';

const SyncAccumulator = sdk.SyncAccumulator;

// Unit tests for SyncAccumulator: verifies that accumulated /sync
// responses round-trip through getJSON(), that timelines are pruned to
// maxTimelineEntries while keeping a usable prev_batch, and that
// ephemeral/account_data events are merged with the expected semantics.
describe("SyncAccumulator", function() {
    let sa;

    beforeEach(function() {
        utils.beforeEach(this); // eslint-disable-line babel/no-invalid-this
        // Small timeline cap so the pruning tests below can hit it quickly.
        sa = new SyncAccumulator({
            maxTimelineEntries: 10,
        });
    });

    it("should return the same /sync response if accumulated exactly once", () => {
        // technically cheating since we also cheekily pre-populate keys we
        // know that the sync accumulator will pre-populate.
        // It isn't 100% transitive.
        const res = {
            next_batch: "abc",
            rooms: {
                invite: {},
                leave: {},
                join: {
                    "!foo:bar": {
                        account_data: { events: [] },
                        ephemeral: { events: [] },
                        unread_notifications: {},
                        state: {
                            events: [
                                member("alice", "join"),
                                member("bob", "join"),
                            ],
                        },
                        summary: {
                            "m.heroes": undefined,
                            "m.joined_member_count": undefined,
                            "m.invited_member_count": undefined,
                        },
                        timeline: {
                            events: [msg("alice", "hi")],
                            prev_batch: "something",
                        },
                    },
                },
            },
        };
        sa.accumulate(res);
        const output = sa.getJSON();
        expect(output.nextBatch).toEqual(res.next_batch);
        expect(output.roomsData).toEqual(res.rooms);
    });

    it("should prune the timeline to the oldest prev_batch within the limit", () => {
        // maxTimelineEntries is 10 so we should get back all
        // 10 timeline messages with a prev_batch of "pinned_to_1"
        sa.accumulate(syncSkeleton({
            state: { events: [member("alice", "join")] },
            timeline: {
                events: [
                    msg("alice", "1"),
                    msg("alice", "2"),
                    msg("alice", "3"),
                    msg("alice", "4"),
                    msg("alice", "5"),
                    msg("alice", "6"),
                    msg("alice", "7"),
                ],
                prev_batch: "pinned_to_1",
            },
        }));
        sa.accumulate(syncSkeleton({
            state: { events: [] },
            timeline: {
                events: [
                    msg("alice", "8"),
                ],
                prev_batch: "pinned_to_8",
            },
        }));
        sa.accumulate(syncSkeleton({
            state: { events: [] },
            timeline: {
                events: [
                    msg("alice", "9"),
                    msg("alice", "10"),
                ],
                prev_batch: "pinned_to_10",
            },
        }));

        let output = sa.getJSON().roomsData.join["!foo:bar"];

        expect(output.timeline.events.length).toEqual(10);
        output.timeline.events.forEach((e, i) => {
            expect(e.content.body).toEqual(""+(i+1));
        });
        expect(output.timeline.prev_batch).toEqual("pinned_to_1");

        // accumulate more messages. Now it can't have a prev_batch of "pinned to 1"
        // AND give us <= 10 messages without losing messages in-between.
        // It should try to find the oldest prev_batch which still fits into 10
        // messages, which is "pinned to 8".
        sa.accumulate(syncSkeleton({
            state: { events: [] },
            timeline: {
                events: [
                    msg("alice", "11"),
                    msg("alice", "12"),
                    msg("alice", "13"),
                    msg("alice", "14"),
                    msg("alice", "15"),
                    msg("alice", "16"),
                    msg("alice", "17"),
                ],
                prev_batch: "pinned_to_11",
            },
        }));

        output = sa.getJSON().roomsData.join["!foo:bar"];

        expect(output.timeline.events.length).toEqual(10);
        output.timeline.events.forEach((e, i) => {
            expect(e.content.body).toEqual(""+(i+8));
        });
        expect(output.timeline.prev_batch).toEqual("pinned_to_8");
    });

    it("should remove the stored timeline on limited syncs", () => {
        sa.accumulate(syncSkeleton({
            state: { events: [member("alice", "join")] },
            timeline: {
                events: [
                    msg("alice", "1"),
                    msg("alice", "2"),
                    msg("alice", "3"),
                ],
                prev_batch: "pinned_to_1",
            },
        }));
        // some time passes and now we get a limited sync
        sa.accumulate(syncSkeleton({
            state: { events: [] },
            timeline: {
                limited: true,
                events: [
                    msg("alice", "51"),
                    msg("alice", "52"),
                    msg("alice", "53"),
                ],
                prev_batch: "pinned_to_51",
            },
        }));

        const output = sa.getJSON().roomsData.join["!foo:bar"];

        // Only the post-gap events should survive a limited sync.
        expect(output.timeline.events.length).toEqual(3);
        output.timeline.events.forEach((e, i) => {
            expect(e.content.body).toEqual(""+(i+51));
        });
        expect(output.timeline.prev_batch).toEqual("pinned_to_51");
    });

    it("should drop typing notifications", () => {
        // m.typing is transient so it must not be persisted by the accumulator.
        const res = syncSkeleton({
            ephemeral: {
                events: [{
                    type: "m.typing",
                    content: {
                        user_ids: ["@alice:localhost"],
                    },
                    room_id: "!foo:bar",
                }],
            },
        });
        sa.accumulate(res);
        expect(
            sa.getJSON().roomsData.join["!foo:bar"].ephemeral.events.length,
        ).toEqual(0);
    });

    it("should clobber account data based on event type", () => {
        const acc1 = {
            type: "favourite.food",
            content: {
                food: "banana",
            },
        };
        const acc2 = {
            type: "favourite.food",
            content: {
                food: "apple",
            },
        };
        sa.accumulate(syncSkeleton({
            account_data: {
                events: [acc1],
            },
        }));
        sa.accumulate(syncSkeleton({
            account_data: {
                events: [acc2],
            },
        }));
        // The later event of the same type replaces the earlier one.
        expect(
            sa.getJSON().roomsData.join["!foo:bar"].account_data.events.length,
        ).toEqual(1);
        expect(
            sa.getJSON().roomsData.join["!foo:bar"].account_data.events[0],
        ).toEqual(acc2);
    });

    it("should clobber global account data based on event type", () => {
        const acc1 = {
            type: "favourite.food",
            content: {
                food: "banana",
            },
        };
        const acc2 = {
            type: "favourite.food",
            content: {
                food: "apple",
            },
        };
        sa.accumulate({
            account_data: {
                events: [acc1],
            },
        });
        sa.accumulate({
            account_data: {
                events: [acc2],
            },
        });
        expect(
            sa.getJSON().accountData.length,
        ).toEqual(1);
        expect(
            sa.getJSON().accountData[0],
        ).toEqual(acc2);
    });

    it("should accumulate read receipts", () => {
        const receipt1 = {
            type: "m.receipt",
            room_id: "!foo:bar",
            content: {
                "$event1:localhost": {
                    "m.read": {
                        "@alice:localhost": { ts: 1 },
                        "@bob:localhost": { ts: 2 },
                    },
                    "some.other.receipt.type": {
                        "@should_be_ignored:localhost": { key: "val" },
                    },
                },
            },
        };
        const receipt2 = {
            type: "m.receipt",
            room_id: "!foo:bar",
            content: {
                "$event2:localhost": {
                    "m.read": {
                        "@bob:localhost": { ts: 2 }, // clobbers event1 receipt
                        "@charlie:localhost": { ts: 3 },
                    },
                },
            },
        };
        sa.accumulate(syncSkeleton({
            ephemeral: {
                events: [receipt1],
            },
        }));
        sa.accumulate(syncSkeleton({
            ephemeral: {
                events: [receipt2],
            },
        }));

        // Receipts are merged into a single synthetic m.receipt event,
        // keeping only m.read entries and the latest receipt per user.
        expect(
            sa.getJSON().roomsData.join["!foo:bar"].ephemeral.events.length,
        ).toEqual(1);
        expect(
            sa.getJSON().roomsData.join["!foo:bar"].ephemeral.events[0],
        ).toEqual({
            type: "m.receipt",
            room_id: "!foo:bar",
            content: {
                "$event1:localhost": {
                    "m.read": {
                        "@alice:localhost": { ts: 1 },
                    },
                },
                "$event2:localhost": {
                    "m.read": {
                        "@bob:localhost": { ts: 2 },
                        "@charlie:localhost": { ts: 3 },
                    },
                },
            },
        });
    });

    describe("summary field", function() {
        // Build a full /sync response for "!foo:bar" carrying the given
        // MSC688-style room summary object.
        function createSyncResponseWithSummary(summary) {
            return {
                next_batch: "abc",
                rooms: {
                    invite: {},
                    leave: {},
                    join: {
                        "!foo:bar": {
                            account_data: { events: [] },
                            ephemeral: { events: [] },
                            unread_notifications: {},
                            state: {
                                events: [],
                            },
                            summary: summary,
                            timeline: {
                                events: [],
                                prev_batch: "something",
                            },
                        },
                    },
                },
            };
        }

        it("should copy summary properties", function() {
            sa.accumulate(createSyncResponseWithSummary({
                "m.heroes": ["@alice:bar"],
                "m.invited_member_count": 2,
            }));
            const summary = sa.getJSON().roomsData.join["!foo:bar"].summary;
            expect(summary["m.invited_member_count"]).toEqual(2);
            expect(summary["m.heroes"]).toEqual(["@alice:bar"]);
        });

        it("should accumulate summary properties", function() {
            sa.accumulate(createSyncResponseWithSummary({
                "m.heroes": ["@alice:bar"],
                "m.invited_member_count": 2,
            }));
            sa.accumulate(createSyncResponseWithSummary({
                "m.heroes": ["@bob:bar"],
                "m.joined_member_count": 5,
            }));
            // Missing keys keep their previous value; present keys clobber.
            const summary = sa.getJSON().roomsData.join["!foo:bar"].summary;
            expect(summary["m.invited_member_count"]).toEqual(2);
            expect(summary["m.joined_member_count"]).toEqual(5);
            expect(summary["m.heroes"]).toEqual(["@bob:bar"]);
        });
    });
});

/**
 * Build a minimal /sync response containing a single joined room,
 * "!foo:bar", whose room object is the supplied join data.
 * @param {Object=} joinObj The data to place under rooms.join["!foo:bar"].
 * Defaults to an empty object when omitted.
 * @return {Object} A /sync response shell with next_batch "abc".
 */
function syncSkeleton(joinObj) {
    joinObj = joinObj || {};
    return {
        next_batch: "abc",
        rooms: {
            join: {
                "!foo:bar": joinObj,
            },
        },
    };
}

/**
 * Build an m.room.message event sent by the given user.
 * @param {string} localpart The sender's localpart; the full user ID
 * becomes "@<localpart>:localhost".
 * @param {string} text The message body.
 * @return {Object} An m.room.message event with a fixed timestamp.
 */
function msg(localpart, text) {
    return {
        content: {
            body: text,
        },
        origin_server_ts: 123456789,
        sender: "@" + localpart + ":localhost",
        type: "m.room.message",
    };
}

/**
 * Build an m.room.member state event for the given user.
 * @param {string} localpart The member's localpart; the full user ID
 * (used as both sender and state_key) becomes "@<localpart>:localhost".
 * @param {string} membership The membership value, e.g. "join".
 * @return {Object} An m.room.member event with a fixed timestamp.
 */
function member(localpart, membership) {
    return {
        content: {
            membership: membership,
        },
        origin_server_ts: 123456789,
        state_key: "@" + localpart + ":localhost",
        sender: "@" + localpart + ":localhost",
        type: "m.room.member",
    };
}