EventIndexing: Don't mention Seshat in the logs.

This commit is contained in:
Damir Jelić 2019-11-13 15:26:27 +01:00
parent 1316e04776
commit ab7f34b45a

View file

@@ -44,7 +44,7 @@ export default class EventIndexer {
if (prevState === null && state === "PREPARED") { if (prevState === null && state === "PREPARED") {
// Load our stored checkpoints, if any. // Load our stored checkpoints, if any.
this.crawlerChekpoints = await indexManager.loadCheckpoints(); this.crawlerChekpoints = await indexManager.loadCheckpoints();
console.log("Seshat: Loaded checkpoints", console.log("EventIndex: Loaded checkpoints",
this.crawlerChekpoints); this.crawlerChekpoints);
return; return;
} }
@@ -62,7 +62,7 @@ export default class EventIndexer {
// rooms can use the search provided by the Homeserver. // rooms can use the search provided by the Homeserver.
const encryptedRooms = rooms.filter(isRoomEncrypted); const encryptedRooms = rooms.filter(isRoomEncrypted);
console.log("Seshat: Adding initial crawler checkpoints"); console.log("EventIndex: Adding initial crawler checkpoints");
// Gather the prev_batch tokens and create checkpoints for // Gather the prev_batch tokens and create checkpoints for
// our message crawler. // our message crawler.
@@ -70,7 +70,7 @@ export default class EventIndexer {
const timeline = room.getLiveTimeline(); const timeline = room.getLiveTimeline();
const token = timeline.getPaginationToken("b"); const token = timeline.getPaginationToken("b");
console.log("Seshat: Got token for indexer", console.log("EventIndex: Got token for indexer",
room.roomId, token); room.roomId, token);
const backCheckpoint = { const backCheckpoint = {
@@ -106,7 +106,7 @@ export default class EventIndexer {
if (prevState === "SYNCING" && state === "SYNCING") { if (prevState === "SYNCING" && state === "SYNCING") {
// A sync was done, presumably we queued up some live events, // A sync was done, presumably we queued up some live events,
// commit them now. // commit them now.
console.log("Seshat: Committing events"); console.log("EventIndex: Committing events");
await indexManager.commitLiveEvents(); await indexManager.commitLiveEvents();
return; return;
} }
@@ -177,7 +177,7 @@ export default class EventIndexer {
let cancelled = false; let cancelled = false;
console.log("Seshat: Started crawler function"); console.log("EventIndex: Started crawler function");
const client = MatrixClientPeg.get(); const client = MatrixClientPeg.get();
const indexManager = PlatformPeg.get().getEventIndexingManager(); const indexManager = PlatformPeg.get().getEventIndexingManager();
@@ -192,10 +192,10 @@ export default class EventIndexer {
// here. // here.
await sleep(this._crawler_timeout); await sleep(this._crawler_timeout);
console.log("Seshat: Running the crawler loop."); console.log("EventIndex: Running the crawler loop.");
if (cancelled) { if (cancelled) {
console.log("Seshat: Cancelling the crawler."); console.log("EventIndex: Cancelling the crawler.");
break; break;
} }
@@ -207,7 +207,7 @@ export default class EventIndexer {
continue; continue;
} }
console.log("Seshat: crawling using checkpoint", checkpoint); console.log("EventIndex: crawling using checkpoint", checkpoint);
// We have a checkpoint, let us fetch some messages, again, very // We have a checkpoint, let us fetch some messages, again, very
// conservatively to not bother our Homeserver too much. // conservatively to not bother our Homeserver too much.
@@ -221,13 +221,13 @@ export default class EventIndexer {
checkpoint.roomId, checkpoint.token, 100, checkpoint.roomId, checkpoint.token, 100,
checkpoint.direction); checkpoint.direction);
} catch (e) { } catch (e) {
console.log("Seshat: Error crawling events:", e); console.log("EventIndex: Error crawling events:", e);
this.crawlerChekpoints.push(checkpoint); this.crawlerChekpoints.push(checkpoint);
continue; continue;
} }
if (res.chunk.length === 0) { if (res.chunk.length === 0) {
console.log("Seshat: Done with the checkpoint", checkpoint); console.log("EventIndex: Done with the checkpoint", checkpoint);
// We got to the start/end of our timeline, lets just // We got to the start/end of our timeline, lets just
// delete our checkpoint and go back to sleep. // delete our checkpoint and go back to sleep.
await indexManager.removeCrawlerCheckpoint(checkpoint); await indexManager.removeCrawlerCheckpoint(checkpoint);
@@ -289,7 +289,7 @@ export default class EventIndexer {
// stage? // stage?
const filteredEvents = matrixEvents.filter(isValidEvent); const filteredEvents = matrixEvents.filter(isValidEvent);
// Let us convert the events back into a format that Seshat can // Let us convert the events back into a format that EventIndex can
// consume. // consume.
const events = filteredEvents.map((ev) => { const events = filteredEvents.map((ev) => {
const jsonEvent = ev.toJSON(); const jsonEvent = ev.toJSON();
@@ -317,7 +317,7 @@ export default class EventIndexer {
}; };
console.log( console.log(
"Seshat: Crawled room", "EventIndex: Crawled room",
client.getRoom(checkpoint.roomId).name, client.getRoom(checkpoint.roomId).name,
"and fetched", events.length, "events.", "and fetched", events.length, "events.",
); );
@@ -330,21 +330,21 @@ export default class EventIndexer {
// Let us delete the checkpoint in that case, otherwise push // Let us delete the checkpoint in that case, otherwise push
// the new checkpoint to be used by the crawler. // the new checkpoint to be used by the crawler.
if (eventsAlreadyAdded === true && newCheckpoint.fullCrawl !== true) { if (eventsAlreadyAdded === true && newCheckpoint.fullCrawl !== true) {
console.log("Seshat: Checkpoint had already all events", console.log("EventIndex: Checkpoint had already all events",
"added, stopping the crawl", checkpoint); "added, stopping the crawl", checkpoint);
await indexManager.removeCrawlerCheckpoint(newCheckpoint); await indexManager.removeCrawlerCheckpoint(newCheckpoint);
} else { } else {
this.crawlerChekpoints.push(newCheckpoint); this.crawlerChekpoints.push(newCheckpoint);
} }
} catch (e) { } catch (e) {
console.log("Seshat: Error durring a crawl", e); console.log("EventIndex: Error durring a crawl", e);
// An error occured, put the checkpoint back so we // An error occured, put the checkpoint back so we
// can retry. // can retry.
this.crawlerChekpoints.push(checkpoint); this.crawlerChekpoints.push(checkpoint);
} }
} }
console.log("Seshat: Stopping crawler function"); console.log("EventIndex: Stopping crawler function");
} }
async onLimitedTimeline(room) { async onLimitedTimeline(room) {
@@ -362,7 +362,7 @@ export default class EventIndexer {
direction: "b", direction: "b",
}; };
console.log("Seshat: Added checkpoint because of a limited timeline", console.log("EventIndex: Added checkpoint because of a limited timeline",
backwardsCheckpoint); backwardsCheckpoint);
await indexManager.addCrawlerCheckpoint(backwardsCheckpoint); await indexManager.addCrawlerCheckpoint(backwardsCheckpoint);
@@ -373,7 +373,7 @@ export default class EventIndexer {
async deleteEventIndex() { async deleteEventIndex() {
const indexManager = PlatformPeg.get().getEventIndexingManager(); const indexManager = PlatformPeg.get().getEventIndexingManager();
if (indexManager !== null) { if (indexManager !== null) {
console.log("Seshat: Deleting event index."); console.log("EventIndex: Deleting event index.");
this.crawlerRef.cancel(); this.crawlerRef.cancel();
await indexManager.deleteEventIndex(); await indexManager.deleteEventIndex();
} }