/*
Copyright 2019 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import PlatformPeg from "../PlatformPeg";
import {MatrixClientPeg} from "../MatrixClientPeg";
import * as Matrix from 'matrix-js-sdk';
import {EventTimeline} from 'matrix-js-sdk';

/**
 * Event indexing class that wraps the platform-specific event indexing.
 */
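/*
 * A rough lifecycle sketch. The searchArgs shape shown here is an
 * assumption; whatever is passed to search() goes straight through to the
 * platform's indexManager:
 *
 *     const eventIndex = new EventIndex();
 *     await eventIndex.init();
 *     // The index now follows live events and crawls room history.
 *     const results = await eventIndex.search({search_term: "piano"});
 *     await eventIndex.close();
 */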
export default class EventIndex {
    constructor() {
        this.crawlerCheckpoints = [];
        // The time that the crawler will wait between /rooms/{room_id}/messages
        // requests.
        this._crawlerTimeout = 3000;
        // The maximum number of events our crawler should fetch in a single
        // crawl.
        this._eventsPerCrawl = 100;
        this._crawler = null;
        this.liveEventsForIndex = new Set();
    }

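    /**
     * Initialize the event index: initialize the platform index, restore
     * persisted crawler checkpoints, and register the client listeners.
     */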
    async init() {
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        await indexManager.initEventIndex();
        console.log("EventIndex: Successfully initialized the event index");

        this.crawlerCheckpoints = await indexManager.loadCheckpoints();
        console.log("EventIndex: Loaded checkpoints", this.crawlerCheckpoints);

        this.registerListeners();
    }

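    /**
     * Register the matrix-js-sdk client listeners that feed the index.
     */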
    registerListeners() {
        const client = MatrixClientPeg.get();

        client.on('sync', this.onSync);
        client.on('Room.timeline', this.onRoomTimeline);
        client.on('Event.decrypted', this.onEventDecrypted);
        client.on('Room.timelineReset', this.onTimelineReset);
    }

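    /**
     * Remove the listeners added by registerListeners().
     */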
    removeListeners() {
        const client = MatrixClientPeg.get();
        if (client === null) return;

        client.removeListener('sync', this.onSync);
        client.removeListener('Room.timeline', this.onRoomTimeline);
        client.removeListener('Event.decrypted', this.onEventDecrypted);
        client.removeListener('Room.timelineReset', this.onTimelineReset);
    }

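    /**
     * The sync event listener.
     *
     * On the first sync after startup this adds initial crawler checkpoints
     * if the index is empty and starts the crawler; on every subsequent
     * sync it commits the queued-up live events.
     */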
    onSync = async (state, prevState, data) => {
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        if (prevState === "PREPARED" && state === "SYNCING") {
            const addInitialCheckpoints = async () => {
                const client = MatrixClientPeg.get();
                const rooms = client.getRooms();

                const isRoomEncrypted = (room) => {
                    return client.isRoomEncrypted(room.roomId);
                };

                // We only care to crawl the encrypted rooms; non-encrypted
                // rooms can use the search provided by the homeserver.
                const encryptedRooms = rooms.filter(isRoomEncrypted);

                console.log("EventIndex: Adding initial crawler checkpoints");

                // Gather the prev_batch tokens and create checkpoints for
                // our message crawler.
                await Promise.all(encryptedRooms.map(async (room) => {
                    const timeline = room.getLiveTimeline();
                    const token = timeline.getPaginationToken("b");

                    console.log("EventIndex: Got token for indexer",
                                room.roomId, token);

                    const backCheckpoint = {
                        roomId: room.roomId,
                        token: token,
                        direction: "b",
                    };

                    const forwardCheckpoint = {
                        roomId: room.roomId,
                        token: token,
                        direction: "f",
                    };

                    await indexManager.addCrawlerCheckpoint(backCheckpoint);
                    await indexManager.addCrawlerCheckpoint(forwardCheckpoint);
                    this.crawlerCheckpoints.push(backCheckpoint);
                    this.crawlerCheckpoints.push(forwardCheckpoint);
                }));
            };

            // If our indexer is empty we're most likely running Riot for the
            // first time with indexing support or running it with an
            // initial sync. Add checkpoints to crawl our encrypted rooms.
            const eventIndexWasEmpty = await indexManager.isEventIndexEmpty();
            if (eventIndexWasEmpty) await addInitialCheckpoints();

            // Start our crawler.
            this.startCrawler();
            return;
        }

        if (prevState === "SYNCING" && state === "SYNCING") {
            // A sync was done, presumably we queued up some live events,
            // commit them now.
            console.log("EventIndex: Committing events");
            await indexManager.commitLiveEvents();
            return;
        }
    }

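    /**
     * The Room.timeline listener.
     *
     * Adds live events of encrypted rooms to the index, queueing them up
     * first if they are still being decrypted.
     */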
    onRoomTimeline = async (ev, room, toStartOfTimeline, removed, data) => {
        // We only index encrypted rooms locally.
        if (!MatrixClientPeg.get().isRoomEncrypted(room.roomId)) return;

        // If it isn't a live event or if it's redacted there's nothing to
        // do.
        if (toStartOfTimeline || !data || !data.liveEvent
            || ev.isRedacted()) {
            return;
        }

        // If the event is not yet decrypted mark it for the
        // Event.decrypted callback.
        if (ev.isBeingDecrypted()) {
            const eventId = ev.getId();
            this.liveEventsForIndex.add(eventId);
        } else {
            // If the event is decrypted or is unencrypted add it to the
            // index now.
            await this.addLiveEventToIndex(ev);
        }
    }

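    /**
     * The Event.decrypted listener.
     *
     * Adds an event to the index once it has been decrypted, provided it
     * was queued up by the Room.timeline listener and decryption succeeded.
     */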
    onEventDecrypted = async (ev, err) => {
        const eventId = ev.getId();

        // If the event isn't in our live event set, ignore it.
        if (!this.liveEventsForIndex.delete(eventId)) return;
        if (err) return;
        await this.addLiveEventToIndex(ev);
    }

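    /**
     * Add a live event to the index, together with profile information for
     * the sender. Event types the index doesn't handle are skipped.
     */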
    async addLiveEventToIndex(ev) {
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        if (["m.room.message", "m.room.name", "m.room.topic"]
            .indexOf(ev.getType()) == -1) {
            return;
        }

        const jsonEvent = ev.toJSON();

        let e;
        if (ev.isEncrypted()) e = jsonEvent.decrypted;
        else e = jsonEvent;

        const profile = {
            displayname: ev.sender.rawDisplayName,
            avatar_url: ev.sender.getMxcAvatarUrl(),
        };

        indexManager.addEventToIndex(e, profile);
    }

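    /**
     * The crawler background task.
     *
     * While not cancelled, pops a checkpoint off this.crawlerCheckpoints,
     * fetches a batch of messages for it, adds the events to the index and
     * pushes a new checkpoint if there is more history to crawl.
     */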
    async crawlerFunc() {
        // TODO either put this in a better place or find a library-provided
        // method that does this.
        const sleep = async (ms) => {
            return new Promise(resolve => setTimeout(resolve, ms));
        };

        let cancelled = false;

        console.log("EventIndex: Started crawler function");

        const client = MatrixClientPeg.get();
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        this._crawler = {};

        this._crawler.cancel = () => {
            cancelled = true;
        };

        while (!cancelled) {
            // This is a low priority task and we don't want to spam our
            // homeserver with /messages requests so we set a hefty timeout
            // here.
            await sleep(this._crawlerTimeout);

            console.log("EventIndex: Running the crawler loop.");

            if (cancelled) {
                break;
            }

            const checkpoint = this.crawlerCheckpoints.shift();

            // There is no checkpoint available currently, one may appear if
            // a sync with limited room timelines happens, so go back to sleep.
            if (checkpoint === undefined) {
                continue;
            }

            console.log("EventIndex: crawling using checkpoint", checkpoint);

            // We have a checkpoint, let us fetch some messages, again, very
            // conservatively to not bother our homeserver too much.
            const eventMapper = client.getEventMapper();
            // TODO we need to ensure to use member lazy loading with this
            // request so we get the correct profiles.
            let res;

            try {
                res = await client._createMessagesRequest(
                    checkpoint.roomId, checkpoint.token, this._eventsPerCrawl,
                    checkpoint.direction);
            } catch (e) {
                console.log("EventIndex: Error crawling events:", e);
                this.crawlerCheckpoints.push(checkpoint);
                continue;
            }

            if (res.chunk.length === 0) {
                console.log("EventIndex: Done with the checkpoint", checkpoint);
                // We got to the start/end of our timeline, let's just
                // delete our checkpoint and go back to sleep.
                await indexManager.removeCrawlerCheckpoint(checkpoint);
                continue;
            }

            // Convert the plain JSON events into Matrix events so they get
            // decrypted if necessary.
            const matrixEvents = res.chunk.map(eventMapper);
            let stateEvents = [];
            if (res.state !== undefined) {
                stateEvents = res.state.map(eventMapper);
            }

            const profiles = {};

            stateEvents.forEach(ev => {
                if (ev.event.content &&
                    ev.event.content.membership === "join") {
                    profiles[ev.event.sender] = {
                        displayname: ev.event.content.displayname,
                        avatar_url: ev.event.content.avatar_url,
                    };
                }
            });

            const decryptionPromises = [];

            matrixEvents.forEach(ev => {
                if (ev.isBeingDecrypted() || ev.isDecryptionFailure()) {
                    // TODO the decryption promise is a private property, this
                    // should either be made public or we should convert the
                    // event that gets fired when decryption is done into a
                    // promise using the once event emitter method:
                    // https://nodejs.org/api/events.html#events_events_once_emitter_name
                    decryptionPromises.push(ev._decryptionPromise);
                }
            });

            // Let us wait for all the events to get decrypted.
            await Promise.all(decryptionPromises);

            // We filter out events for which decryption failed, that are
            // redacted, or that aren't of a type we know how to index.
            const isValidEvent = (value) => {
                return ([
                    "m.room.message",
                    "m.room.name",
                    "m.room.topic",
                ].indexOf(value.getType()) >= 0
                    && !value.isRedacted() && !value.isDecryptionFailure()
                );
                // TODO do we need to check if the event has all the valid
                // attributes?
            };

            // TODO if there are no events at this point we're missing a lot
            // of decryption keys, do we want to retry this checkpoint at a
            // later stage?
            const filteredEvents = matrixEvents.filter(isValidEvent);

            // Let us convert the events back into a format that EventIndex
            // can consume.
            const events = filteredEvents.map((ev) => {
                const jsonEvent = ev.toJSON();

                let e;
                if (ev.isEncrypted()) e = jsonEvent.decrypted;
                else e = jsonEvent;

                let profile = {};
                if (e.sender in profiles) profile = profiles[e.sender];
                const object = {
                    event: e,
                    profile: profile,
                };
                return object;
            });

            // Create a new checkpoint so we can continue crawling the room
            // for messages.
            const newCheckpoint = {
                roomId: checkpoint.roomId,
                token: res.end,
                fullCrawl: checkpoint.fullCrawl,
                direction: checkpoint.direction,
            };

            console.log(
                "EventIndex: Crawled room",
                client.getRoom(checkpoint.roomId).name,
                "and fetched", events.length, "events.",
            );

            try {
                const eventsAlreadyAdded = await indexManager.addHistoricEvents(
                    events, newCheckpoint, checkpoint);
                // If all events were already indexed we assume that we caught
                // up with our index and don't need to crawl the room further.
                // Let us delete the checkpoint in that case, otherwise push
                // the new checkpoint to be used by the crawler.
                if (eventsAlreadyAdded === true && newCheckpoint.fullCrawl !== true) {
                    console.log("EventIndex: Checkpoint already had all events",
                                "added, stopping the crawl", checkpoint);
                    await indexManager.removeCrawlerCheckpoint(newCheckpoint);
                } else {
                    this.crawlerCheckpoints.push(newCheckpoint);
                }
            } catch (e) {
                console.log("EventIndex: Error during a crawl", e);
                // An error occurred, put the checkpoint back so we
                // can retry.
                this.crawlerCheckpoints.push(checkpoint);
            }
        }

        this._crawler = null;

        console.log("EventIndex: Stopping crawler function");
    }

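    /**
     * The Room.timelineReset listener.
     *
     * A limited sync means we're missing part of the room history, so add
     * a backwards checkpoint letting the crawler catch up on the gap.
     */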
    onTimelineReset = async (room, timelineSet, resetAllTimelines) => {
        if (room === null) return;

        const indexManager = PlatformPeg.get().getEventIndexingManager();
        if (!MatrixClientPeg.get().isRoomEncrypted(room.roomId)) return;

        const timeline = room.getLiveTimeline();
        const token = timeline.getPaginationToken("b");

        const backwardsCheckpoint = {
            roomId: room.roomId,
            token: token,
            fullCrawl: false,
            direction: "b",
        };

        console.log("EventIndex: Added checkpoint because of a limited timeline",
                    backwardsCheckpoint);

        await indexManager.addCrawlerCheckpoint(backwardsCheckpoint);

        this.crawlerCheckpoints.push(backwardsCheckpoint);
    }

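    /**
     * Start the crawler background task, unless it is already running.
     */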
    startCrawler() {
        if (this._crawler !== null) return;
        this.crawlerFunc();
    }

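    /**
     * Ask the crawler background task to cancel after its current iteration.
     */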
    stopCrawler() {
        if (this._crawler === null) return;
        this._crawler.cancel();
    }

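    /**
     * Shut the index down: remove the client listeners, stop the crawler,
     * and close the underlying platform event index.
     */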
    async close() {
        const indexManager = PlatformPeg.get().getEventIndexingManager();
        this.removeListeners();
        this.stopCrawler();
        return indexManager.closeEventIndex();
    }

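    /**
     * Search the event index. The searchArgs are handed to the platform
     * indexManager unchanged, so their exact shape is defined by the
     * platform implementation.
     */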
    async search(searchArgs) {
        const indexManager = PlatformPeg.get().getEventIndexingManager();
        return indexManager.searchEventIndex(searchArgs);
    }

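    /**
     * Load file events for a room from the event index.
     *
     * Loads up to `limit` events, optionally continuing from `fromEvent` in
     * the given `direction`, and converts them into MatrixEvent objects
     * whose sender is reconstructed from the stored profile.
     */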
    async loadFileEvents(room, limit = 10, fromEvent = null, direction = EventTimeline.BACKWARDS) {
        const client = MatrixClientPeg.get();
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        const loadArgs = {
            roomId: room.roomId,
            limit: limit,
        };

        if (fromEvent) {
            loadArgs.fromEvent = fromEvent;
            loadArgs.direction = direction;
        }

        let events;

        // Get our events from the event index.
        try {
            events = await indexManager.loadFileEvents(loadArgs);
        } catch (e) {
            console.log("EventIndex: Error getting file events", e);
            return [];
        }

        const eventMapper = client.getEventMapper();

        // Turn the events into MatrixEvent objects.
        const matrixEvents = events.map(e => {
            const matrixEvent = eventMapper(e.event);

            const member = new Matrix.RoomMember(room.roomId, matrixEvent.getSender());

            // We can't really reconstruct the whole room state from our
            // EventIndex to calculate the correct display name. Always use
            // the disambiguated form instead.
            member.name = e.profile.displayname + " (" + matrixEvent.getSender() + ")";

            // This sets the avatar URL.
            const memberEvent = eventMapper(
                {
                    content: {
                        membership: "join",
                        avatar_url: e.profile.avatar_url,
                        displayname: e.profile.displayname,
                    },
                    type: "m.room.member",
                    event_id: matrixEvent.getId() + ":eventIndex",
                    room_id: matrixEvent.getRoomId(),
                    sender: matrixEvent.getSender(),
                    origin_server_ts: matrixEvent.getTs(),
                    state_key: matrixEvent.getSender(),
                },
            );

            // We set this manually to avoid emitting RoomMember.membership and
            // RoomMember.name events.
            member.events.member = memberEvent;
            matrixEvent.sender = member;

            return matrixEvent;
        });

        return matrixEvents;
    }

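    /**
     * Fill a file panel timeline with events from the event index and set
     * the pagination token so later paginations continue from the oldest
     * retrieved event. Returns true if any events were added.
     */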
    async populateFileTimeline(timelineSet, timeline, room, limit = 10,
        fromEvent = null, direction = EventTimeline.BACKWARDS) {
        const matrixEvents = await this.loadFileEvents(room, limit, fromEvent, direction);

        // Add the events to the live timeline of the file panel.
        matrixEvents.forEach(e => {
            if (!timelineSet.eventIdToTimeline(e.getId())) {
                timelineSet.addEventToTimeline(e, timeline,
                    direction == EventTimeline.BACKWARDS);
            }
        });

        // Set the pagination token to the oldest event that we retrieved.
        if (matrixEvents.length > 0) {
            timeline.setPaginationToken(matrixEvents[matrixEvents.length - 1].getId(),
                EventTimeline.BACKWARDS);
            return true;
        } else {
            timeline.setPaginationToken("", EventTimeline.BACKWARDS);
            return false;
        }
    }

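    /**
     * Paginate the given TimelineWindow using the event index instead of
     * the homeserver, mirroring what TimelineWindow.paginate() does in the
     * js-sdk.
     */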
    paginateTimelineWindow(room, timelineWindow, direction, limit) {
        let tl;

        // TODO this is copied from the js-sdk, it should probably be
        // exposed to us through the js-sdk.
        const moveWindowCap = (timelineWindow, timeline, direction, limit) => {
            const count = (direction == EventTimeline.BACKWARDS) ?
                timeline.retreat(limit) : timeline.advance(limit);

            if (count) {
                timelineWindow._eventCount += count;
                const excess = timelineWindow._eventCount - timelineWindow._windowLimit;

                if (excess > 0) {
                    timelineWindow.unpaginate(excess, direction != EventTimeline.BACKWARDS);
                }
                return true;
            }

            return false;
        };

        // TODO these private fields should be somehow exposed in the js-sdk.
        if (direction == EventTimeline.BACKWARDS) tl = timelineWindow._start;
        else if (direction == EventTimeline.FORWARDS) tl = timelineWindow._end;

        if (!tl) return Promise.resolve(false);
        if (tl.pendingPaginate) return tl.pendingPaginate;

        if (moveWindowCap(timelineWindow, tl, direction, limit)) {
            return Promise.resolve(true);
        }

        const paginationMethod = async (timelineWindow, timeline, room, direction, limit) => {
            const timelineSet = timelineWindow._timelineSet;
            const token = timeline.timeline.getPaginationToken(direction);

            const ret = await this.populateFileTimeline(timelineSet, timeline.timeline,
                room, limit, token, direction);

            moveWindowCap(timelineWindow, timeline, direction, limit);
            timeline.pendingPaginate = null;

            return ret;
        };

        const paginationPromise = paginationMethod(timelineWindow, tl, room,
            direction, limit);
        tl.pendingPaginate = paginationPromise;

        return paginationPromise;
    }
}