2019-11-12 15:29:07 +03:00
|
|
|
/*
|
2019-11-18 12:16:29 +03:00
|
|
|
Copyright 2019 The Matrix.org Foundation C.I.C.
|
2019-11-12 15:29:07 +03:00
|
|
|
|
|
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
you may not use this file except in compliance with the License.
|
|
|
|
You may obtain a copy of the License at
|
|
|
|
|
|
|
|
http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
|
|
Unless required by applicable law or agreed to in writing, software
|
|
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
See the License for the specific language governing permissions and
|
|
|
|
limitations under the License.
|
|
|
|
*/
|
|
|
|
|
|
|
|
import PlatformPeg from "./PlatformPeg";
|
|
|
|
import MatrixClientPeg from "./MatrixClientPeg";
|
|
|
|
|
|
|
|
/**
 * Event indexing class that wraps the platform specific event indexing.
 *
 * Live events flow in through onRoomTimeline()/onEventDecrypted(), while a
 * background crawler (crawlerFunc) walks room history backwards/forwards from
 * stored checkpoints and feeds historic events into the platform index.
 */
export default class EventIndex {
    constructor() {
        // Checkpoints the crawler still needs to process; each entry holds a
        // roomId, a pagination token and a direction ("b" or "f").
        this.crawlerCheckpoints = [];
        // The time (in ms) that the crawler will wait between
        // /rooms/{room_id}/messages requests.
        this._crawlerTimeout = 3000;
        // The maximum number of events our crawler should fetch in a single
        // crawl.
        this._eventsPerCrawl = 100;
        // Handle of the currently running crawler ({cancel} object), or null
        // if no crawler is running.
        this._crawlerRef = null;
        // Event IDs of live events that are still being decrypted; those get
        // indexed later from the Event.decrypted callback.
        this.liveEventsForIndex = new Set();
    }

    /**
     * Initialize the platform specific event index.
     *
     * @returns {Promise} Resolves once the index has been initialized.
     */
    async init() {
        const indexManager = PlatformPeg.get().getEventIndexingManager();
        return indexManager.initEventIndex();
    }

    /**
     * Handler for sync state changes of the Matrix client.
     *
     * Loads stored checkpoints after the initial "PREPARED" state, seeds
     * checkpoints and starts the crawler on the first "SYNCING" state, and
     * commits queued live events on every subsequent sync.
     *
     * @param {string} state - The new sync state.
     * @param {string} prevState - The previous sync state.
     * @param {Object} data - Additional sync data (unused here, part of the
     *     sync callback signature).
     */
    async onSync(state, prevState, data) {
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        if (prevState === null && state === "PREPARED") {
            // Load our stored checkpoints, if any.
            this.crawlerCheckpoints = await indexManager.loadCheckpoints();
            console.log("EventIndex: Loaded checkpoints",
                this.crawlerCheckpoints);
            return;
        }

        if (prevState === "PREPARED" && state === "SYNCING") {
            const addInitialCheckpoints = async () => {
                const client = MatrixClientPeg.get();
                const rooms = client.getRooms();

                const isRoomEncrypted = (room) => {
                    return client.isRoomEncrypted(room.roomId);
                };

                // We only care to crawl the encrypted rooms, non-encrypted
                // rooms can use the search provided by the homeserver.
                const encryptedRooms = rooms.filter(isRoomEncrypted);

                console.log("EventIndex: Adding initial crawler checkpoints");

                // Gather the prev_batch tokens and create checkpoints for
                // our message crawler.
                await Promise.all(encryptedRooms.map(async (room) => {
                    const timeline = room.getLiveTimeline();
                    const token = timeline.getPaginationToken("b");

                    console.log("EventIndex: Got token for indexer",
                        room.roomId, token);

                    const backCheckpoint = {
                        roomId: room.roomId,
                        token: token,
                        direction: "b",
                    };

                    const forwardCheckpoint = {
                        roomId: room.roomId,
                        token: token,
                        direction: "f",
                    };

                    await indexManager.addCrawlerCheckpoint(backCheckpoint);
                    await indexManager.addCrawlerCheckpoint(forwardCheckpoint);
                    this.crawlerCheckpoints.push(backCheckpoint);
                    this.crawlerCheckpoints.push(forwardCheckpoint);
                }));
            };

            // If our indexer is empty we're most likely running Riot the
            // first time with indexing support or running it with an
            // initial sync. Add checkpoints to crawl our encrypted rooms.
            const eventIndexWasEmpty = await indexManager.isEventIndexEmpty();
            if (eventIndexWasEmpty) await addInitialCheckpoints();

            // Start our crawler.
            this.startCrawler();
            return;
        }

        if (prevState === "SYNCING" && state === "SYNCING") {
            // A sync was done, presumably we queued up some live events,
            // commit them now.
            console.log("EventIndex: Committing events");
            await indexManager.commitLiveEvents();
            return;
        }
    }

    /**
     * Handler for timeline events of the Matrix client.
     *
     * Queues up live events of encrypted rooms to be added to the index,
     * either immediately or (for events still being decrypted) via the
     * Event.decrypted callback.
     *
     * @param {MatrixEvent} ev - The event that was added to the timeline.
     * @param {Room} room - The room the event belongs to.
     * @param {boolean} toStartOfTimeline - True if the event was prepended.
     * @param {boolean} removed - True if the event was removed (unused).
     * @param {Object} data - Timeline data; data.liveEvent marks live events.
     */
    async onRoomTimeline(ev, room, toStartOfTimeline, removed, data) {
        // We only index encrypted rooms locally.
        if (!MatrixClientPeg.get().isRoomEncrypted(room.roomId)) return;

        // If it isn't a live event or if it's redacted there's nothing to
        // do.
        if (toStartOfTimeline || !data || !data.liveEvent
            || ev.isRedacted()) {
            return;
        }

        // If the event is not yet decrypted mark it for the
        // Event.decrypted callback.
        if (ev.isBeingDecrypted()) {
            const eventId = ev.getId();
            this.liveEventsForIndex.add(eventId);
        } else {
            // If the event is decrypted or is unencrypted add it to the
            // index now.
            await this.addLiveEventToIndex(ev);
        }
    }

    /**
     * Handler for decrypted events; adds previously queued live events to
     * the index once their decryption finished successfully.
     *
     * @param {MatrixEvent} ev - The event that finished decrypting.
     * @param {Error} err - Set if decryption failed; such events are dropped.
     */
    async onEventDecrypted(ev, err) {
        const eventId = ev.getId();

        // If the event isn't in our live event set, ignore it.
        if (!this.liveEventsForIndex.delete(eventId)) return;
        if (err) return;
        await this.addLiveEventToIndex(ev);
    }

    /**
     * Add a live event to the platform index.
     *
     * Only m.room.message, m.room.name and m.room.topic events are indexed;
     * anything else is ignored.
     *
     * @param {MatrixEvent} ev - The (decrypted or unencrypted) event to add.
     */
    async addLiveEventToIndex(ev) {
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        if (["m.room.message", "m.room.name", "m.room.topic"]
            .indexOf(ev.getType()) === -1) {
            return;
        }

        const e = ev.toJSON().decrypted;
        const profile = {
            displayname: ev.sender.rawDisplayName,
            avatar_url: ev.sender.getMxcAvatarUrl(),
        };

        // Await so failures propagate to our caller instead of being
        // silently dropped as an unhandled rejection.
        await indexManager.addEventToIndex(e, profile);
    }

    /**
     * The main crawler loop.
     *
     * Repeatedly pops a checkpoint, fetches a batch of messages for it via
     * /rooms/{room_id}/messages, decrypts and filters them, and adds the
     * historic events to the index. Runs until handle.cancel() is called.
     *
     * @param {Object} handle - An object this method attaches a cancel()
     *     function to, allowing the caller to stop the crawler.
     */
    async crawlerFunc(handle) {
        // TODO either put this in a better place or find a library provided
        // method that does this.
        const sleep = async (ms) => {
            return new Promise(resolve => setTimeout(resolve, ms));
        };

        let cancelled = false;

        console.log("EventIndex: Started crawler function");

        const client = MatrixClientPeg.get();
        const indexManager = PlatformPeg.get().getEventIndexingManager();

        handle.cancel = () => {
            cancelled = true;
        };

        while (!cancelled) {
            // This is a low priority task and we don't want to spam our
            // homeserver with /messages requests so we set a hefty timeout
            // here.
            await sleep(this._crawlerTimeout);

            console.log("EventIndex: Running the crawler loop.");

            if (cancelled) {
                break;
            }

            const checkpoint = this.crawlerCheckpoints.shift();

            // There is no checkpoint available currently, one may appear if
            // a sync with limited room timelines happens, so go back to sleep.
            if (checkpoint === undefined) {
                continue;
            }

            console.log("EventIndex: crawling using checkpoint", checkpoint);

            // We have a checkpoint, let us fetch some messages, again, very
            // conservatively to not bother our homeserver too much.
            const eventMapper = client.getEventMapper();
            // TODO we need to ensure to use member lazy loading with this
            // request so we get the correct profiles.
            let res;

            try {
                res = await client._createMessagesRequest(
                    checkpoint.roomId, checkpoint.token, this._eventsPerCrawl,
                    checkpoint.direction);
            } catch (e) {
                console.log("EventIndex: Error crawling events:", e);
                this.crawlerCheckpoints.push(checkpoint);
                continue;
            }

            if (res.chunk.length === 0) {
                console.log("EventIndex: Done with the checkpoint", checkpoint);
                // We got to the start/end of our timeline, lets just
                // delete our checkpoint and go back to sleep.
                await indexManager.removeCrawlerCheckpoint(checkpoint);
                continue;
            }

            // Convert the plain JSON events into Matrix events so they get
            // decrypted if necessary.
            const matrixEvents = res.chunk.map(eventMapper);
            let stateEvents = [];
            if (res.state !== undefined) {
                stateEvents = res.state.map(eventMapper);
            }

            const profiles = {};

            stateEvents.forEach(ev => {
                if (ev.event.content &&
                    ev.event.content.membership === "join") {
                    profiles[ev.event.sender] = {
                        displayname: ev.event.content.displayname,
                        avatar_url: ev.event.content.avatar_url,
                    };
                }
            });

            const decryptionPromises = [];

            matrixEvents.forEach(ev => {
                if (ev.isBeingDecrypted() || ev.isDecryptionFailure()) {
                    // TODO the decryption promise is a private property, this
                    // should either be made public or we should convert the
                    // event that gets fired when decryption is done into a
                    // promise using the once event emitter method:
                    // https://nodejs.org/api/events.html#events_events_once_emitter_name
                    decryptionPromises.push(ev._decryptionPromise);
                }
            });

            // Let us wait for all the events to get decrypted.
            await Promise.all(decryptionPromises);

            // We filter out events for which decryption failed, are redacted
            // or aren't of a type that we know how to index.
            const isValidEvent = (value) => {
                return ([
                    "m.room.message",
                    "m.room.name",
                    "m.room.topic",
                ].indexOf(value.getType()) >= 0
                    && !value.isRedacted() && !value.isDecryptionFailure()
                );
                // TODO do we need to check if the event has all the valid
                // attributes?
            };

            // TODO if there are no events at this point we're missing a lot
            // of decryption keys, do we want to retry this checkpoint at a
            // later stage?
            const filteredEvents = matrixEvents.filter(isValidEvent);

            // Let us convert the events back into a format that EventIndex can
            // consume.
            const events = filteredEvents.map((ev) => {
                const jsonEvent = ev.toJSON();

                let e;
                if (ev.isEncrypted()) e = jsonEvent.decrypted;
                else e = jsonEvent;

                let profile = {};
                if (e.sender in profiles) profile = profiles[e.sender];
                const object = {
                    event: e,
                    profile: profile,
                };
                return object;
            });

            // Create a new checkpoint so we can continue crawling the room for
            // messages.
            const newCheckpoint = {
                roomId: checkpoint.roomId,
                token: res.end,
                fullCrawl: checkpoint.fullCrawl,
                direction: checkpoint.direction,
            };

            console.log(
                "EventIndex: Crawled room",
                client.getRoom(checkpoint.roomId).name,
                "and fetched", events.length, "events.",
            );

            try {
                const eventsAlreadyAdded = await indexManager.addHistoricEvents(
                    events, newCheckpoint, checkpoint);
                // If all events were already indexed we assume that we caught
                // up with our index and don't need to crawl the room further.
                // Let us delete the checkpoint in that case, otherwise push
                // the new checkpoint to be used by the crawler.
                if (eventsAlreadyAdded === true && newCheckpoint.fullCrawl !== true) {
                    console.log("EventIndex: Checkpoint had already all events",
                        "added, stopping the crawl", checkpoint);
                    await indexManager.removeCrawlerCheckpoint(newCheckpoint);
                } else {
                    this.crawlerCheckpoints.push(newCheckpoint);
                }
            } catch (e) {
                console.log("EventIndex: Error during a crawl", e);
                // An error occured, put the checkpoint back so we
                // can retry.
                this.crawlerCheckpoints.push(checkpoint);
            }
        }

        console.log("EventIndex: Stopping crawler function");
    }

    /**
     * Handler for limited timelines (gappy syncs).
     *
     * Adds a new backwards crawler checkpoint for the room so the gap in the
     * timeline gets backfilled into the index.
     *
     * @param {Room} room - The room that had a limited timeline.
     */
    async onLimitedTimeline(room) {
        const indexManager = PlatformPeg.get().getEventIndexingManager();
        if (!MatrixClientPeg.get().isRoomEncrypted(room.roomId)) return;

        const timeline = room.getLiveTimeline();
        const token = timeline.getPaginationToken("b");

        const backwardsCheckpoint = {
            roomId: room.roomId,
            token: token,
            fullCrawl: false,
            direction: "b",
        };

        console.log("EventIndex: Added checkpoint because of a limited timeline",
            backwardsCheckpoint);

        await indexManager.addCrawlerCheckpoint(backwardsCheckpoint);

        this.crawlerCheckpoints.push(backwardsCheckpoint);
    }

    /**
     * Start the crawler background task if it isn't already running.
     */
    startCrawler() {
        if (this._crawlerRef !== null) return;

        const crawlerHandle = {};
        this.crawlerFunc(crawlerHandle);
        this._crawlerRef = crawlerHandle;
    }

    /**
     * Stop the crawler background task; no-op if it isn't running.
     */
    stopCrawler() {
        if (this._crawlerRef === null) return;

        this._crawlerRef.cancel();
        this._crawlerRef = null;
    }

    /**
     * Stop the crawler and close the platform event index.
     *
     * @returns {Promise} Resolves once the index has been closed.
     */
    async close() {
        const indexManager = PlatformPeg.get().getEventIndexingManager();
        this.stopCrawler();
        return indexManager.closeEventIndex();
    }

    /**
     * Search the platform event index.
     *
     * @param {Object} searchArgs - The platform specific search arguments.
     * @returns {Promise} Resolves to the search results.
     */
    async search(searchArgs) {
        const indexManager = PlatformPeg.get().getEventIndexingManager();
        return indexManager.searchEventIndex(searchArgs);
    }
}
|