2019-11-12 17:39:26 +03:00
|
|
|
/*
|
2021-07-10 17:43:46 +03:00
|
|
|
Copyright 2019 - 2021 The Matrix.org Foundation C.I.C.
|
2019-11-12 17:39:26 +03:00
|
|
|
|
|
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
you may not use this file except in compliance with the License.
|
|
|
|
You may obtain a copy of the License at
|
|
|
|
|
|
|
|
http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
|
|
Unless required by applicable law or agreed to in writing, software
|
|
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
See the License for the specific language governing permissions and
|
|
|
|
limitations under the License.
|
|
|
|
*/
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
import {
|
|
|
|
IResultRoomEvents,
|
|
|
|
ISearchRequestBody,
|
|
|
|
ISearchResponse,
|
|
|
|
ISearchResult,
|
|
|
|
ISearchResults,
|
|
|
|
SearchOrderBy,
|
|
|
|
} from "matrix-js-sdk/src/@types/search";
|
|
|
|
import { IRoomEventFilter } from "matrix-js-sdk/src/filter";
|
|
|
|
import { EventType } from "matrix-js-sdk/src/@types/event";
|
2021-10-23 01:23:32 +03:00
|
|
|
import { SearchResult } from "matrix-js-sdk/src/models/search-result";
|
2021-07-10 17:43:46 +03:00
|
|
|
|
|
|
|
import { ISearchArgs } from "./indexing/BaseEventIndexManager";
|
2019-11-19 14:52:12 +03:00
|
|
|
import EventIndexPeg from "./indexing/EventIndexPeg";
|
2021-06-29 15:11:58 +03:00
|
|
|
import { MatrixClientPeg } from "./MatrixClientPeg";
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2020-06-04 16:25:02 +03:00
|
|
|
// How many events to request per batch from each search source; also the
// page size handed back to the UI per pagination call.
const SEARCH_LIMIT = 10;
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
async function serverSideSearch(
|
|
|
|
term: string,
|
|
|
|
roomId: string = undefined,
|
2022-11-18 19:40:22 +03:00
|
|
|
abortSignal?: AbortSignal,
|
2022-12-12 14:24:14 +03:00
|
|
|
): Promise<{ response: ISearchResponse; query: ISearchRequestBody }> {
|
2020-06-03 15:16:02 +03:00
|
|
|
const client = MatrixClientPeg.get();
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
const filter: IRoomEventFilter = {
|
2020-06-04 16:33:51 +03:00
|
|
|
limit: SEARCH_LIMIT,
|
|
|
|
};
|
|
|
|
|
|
|
|
if (roomId !== undefined) filter.rooms = [roomId];
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
const body: ISearchRequestBody = {
|
2020-06-03 15:16:02 +03:00
|
|
|
search_categories: {
|
|
|
|
room_events: {
|
|
|
|
search_term: term,
|
|
|
|
filter: filter,
|
2021-07-10 17:43:46 +03:00
|
|
|
order_by: SearchOrderBy.Recent,
|
2020-06-03 15:16:02 +03:00
|
|
|
event_context: {
|
|
|
|
before_limit: 1,
|
|
|
|
after_limit: 1,
|
|
|
|
include_profile: true,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
2022-11-18 19:40:22 +03:00
|
|
|
const response = await client.search({ body: body }, abortSignal);
|
2020-06-03 15:16:02 +03:00
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
return { response, query: body };
|
2020-06-03 15:16:02 +03:00
|
|
|
}
|
|
|
|
|
2022-11-18 19:40:22 +03:00
|
|
|
async function serverSideSearchProcess(
|
|
|
|
term: string,
|
|
|
|
roomId: string = undefined,
|
|
|
|
abortSignal?: AbortSignal,
|
|
|
|
): Promise<ISearchResults> {
|
2020-06-10 11:09:16 +03:00
|
|
|
const client = MatrixClientPeg.get();
|
2022-11-18 19:40:22 +03:00
|
|
|
const result = await serverSideSearch(term, roomId, abortSignal);
|
2020-06-10 11:09:16 +03:00
|
|
|
|
2020-06-10 11:34:47 +03:00
|
|
|
// The js-sdk method backPaginateRoomEventsSearch() uses _query internally
|
2021-07-10 17:43:46 +03:00
|
|
|
// so we're reusing the concept here since we want to delegate the
|
2020-06-10 11:34:47 +03:00
|
|
|
// pagination back to backPaginateRoomEventsSearch() in some cases.
|
2021-07-10 17:43:46 +03:00
|
|
|
const searchResults: ISearchResults = {
|
2022-11-18 19:40:22 +03:00
|
|
|
abortSignal,
|
2020-06-10 11:09:16 +03:00
|
|
|
_query: result.query,
|
|
|
|
results: [],
|
|
|
|
highlights: [],
|
|
|
|
};
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
return client.processRoomEventsSearch(searchResults, result.response);
|
2020-06-10 11:09:16 +03:00
|
|
|
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
function compareEvents(a: ISearchResult, b: ISearchResult): number {
|
2020-06-03 15:16:02 +03:00
|
|
|
const aEvent = a.result;
|
|
|
|
const bEvent = b.result;
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2020-06-04 16:25:32 +03:00
|
|
|
if (aEvent.origin_server_ts > bEvent.origin_server_ts) return -1;
|
|
|
|
if (aEvent.origin_server_ts < bEvent.origin_server_ts) return 1;
|
|
|
|
|
2020-06-03 15:16:02 +03:00
|
|
|
return 0;
|
2019-11-12 17:39:26 +03:00
|
|
|
}
|
|
|
|
|
2022-11-18 19:40:22 +03:00
|
|
|
async function combinedSearch(searchTerm: string, abortSignal?: AbortSignal): Promise<ISearchResults> {
|
2020-06-03 15:16:02 +03:00
|
|
|
const client = MatrixClientPeg.get();
|
|
|
|
|
2019-11-13 12:10:35 +03:00
|
|
|
// Create two promises, one for the local search, one for the
|
|
|
|
// server-side search.
|
2022-11-18 19:40:22 +03:00
|
|
|
const serverSidePromise = serverSideSearch(searchTerm, undefined, abortSignal);
|
2020-06-10 12:45:32 +03:00
|
|
|
const localPromise = localSearch(searchTerm);
|
2019-11-13 12:10:35 +03:00
|
|
|
|
|
|
|
// Wait for both promises to resolve.
|
|
|
|
await Promise.all([serverSidePromise, localPromise]);
|
|
|
|
|
|
|
|
// Get both search results.
|
|
|
|
const localResult = await localPromise;
|
|
|
|
const serverSideResult = await serverSidePromise;
|
|
|
|
|
2020-06-03 15:16:02 +03:00
|
|
|
const serverQuery = serverSideResult.query;
|
|
|
|
const serverResponse = serverSideResult.response;
|
|
|
|
|
|
|
|
const localQuery = localResult.query;
|
|
|
|
const localResponse = localResult.response;
|
|
|
|
|
2020-06-04 16:25:02 +03:00
|
|
|
// Store our queries for later on so we can support pagination.
|
2020-06-10 11:34:47 +03:00
|
|
|
//
|
|
|
|
// We're reusing _query here again to not introduce separate code paths and
|
|
|
|
// concepts for our different pagination methods. We're storing the
|
|
|
|
// server-side next batch separately since the query is the json body of
|
|
|
|
// the request and next_batch needs to be a query parameter.
|
|
|
|
//
|
|
|
|
// We can't put it in the final result that _processRoomEventsSearch()
|
|
|
|
// returns since that one can be either a server-side one, a local one or a
|
|
|
|
// fake one to fetch the remaining cached events. See the docs for
|
|
|
|
// combineEvents() for an explanation why we need to cache events.
|
2021-07-10 17:43:46 +03:00
|
|
|
const emptyResult: ISeshatSearchResults = {
|
2020-06-03 15:16:02 +03:00
|
|
|
seshatQuery: localQuery,
|
|
|
|
_query: serverQuery,
|
2021-07-10 18:00:04 +03:00
|
|
|
serverSideNextBatch: serverResponse.search_categories.room_events.next_batch,
|
2020-06-04 16:25:02 +03:00
|
|
|
cachedEvents: [],
|
|
|
|
oldestEventFrom: "server",
|
2020-06-03 15:16:02 +03:00
|
|
|
results: [],
|
|
|
|
highlights: [],
|
2019-11-12 17:39:26 +03:00
|
|
|
};
|
|
|
|
|
2020-06-04 16:25:02 +03:00
|
|
|
// Combine our results.
|
2020-06-03 15:16:02 +03:00
|
|
|
const combinedResult = combineResponses(emptyResult, localResponse, serverResponse.search_categories.room_events);
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2020-06-04 16:25:02 +03:00
|
|
|
// Let the client process the combined result.
|
2021-07-10 17:43:46 +03:00
|
|
|
const response: ISearchResponse = {
|
2020-06-03 15:16:02 +03:00
|
|
|
search_categories: {
|
|
|
|
room_events: combinedResult,
|
|
|
|
},
|
|
|
|
};
|
2020-05-29 17:32:57 +03:00
|
|
|
|
2021-06-02 06:36:28 +03:00
|
|
|
const result = client.processRoomEventsSearch(emptyResult, response);
|
2020-05-29 17:32:57 +03:00
|
|
|
|
2020-06-10 16:41:55 +03:00
|
|
|
// Restore our encryption info so we can properly re-verify the events.
|
|
|
|
restoreEncryptionInfo(result.results);
|
|
|
|
|
2019-11-13 12:10:35 +03:00
|
|
|
return result;
|
|
|
|
}
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
async function localSearch(
|
|
|
|
searchTerm: string,
|
|
|
|
roomId: string = undefined,
|
|
|
|
processResult = true,
|
2022-12-12 14:24:14 +03:00
|
|
|
): Promise<{ response: IResultRoomEvents; query: ISearchArgs }> {
|
2020-06-10 11:09:16 +03:00
|
|
|
const eventIndex = EventIndexPeg.get();
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
const searchArgs: ISearchArgs = {
|
2019-11-13 12:10:35 +03:00
|
|
|
search_term: searchTerm,
|
|
|
|
before_limit: 1,
|
|
|
|
after_limit: 1,
|
2020-06-04 16:25:02 +03:00
|
|
|
limit: SEARCH_LIMIT,
|
2019-11-13 12:10:35 +03:00
|
|
|
order_by_recency: true,
|
2019-11-13 13:02:54 +03:00
|
|
|
room_id: undefined,
|
2019-11-13 12:10:35 +03:00
|
|
|
};
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2019-11-13 12:10:35 +03:00
|
|
|
if (roomId !== undefined) {
|
|
|
|
searchArgs.room_id = roomId;
|
|
|
|
}
|
2019-11-12 17:39:26 +03:00
|
|
|
|
2020-06-10 11:09:16 +03:00
|
|
|
const localResult = await eventIndex.search(searchArgs);
|
|
|
|
|
|
|
|
searchArgs.next_batch = localResult.next_batch;
|
|
|
|
|
|
|
|
const result = {
|
|
|
|
response: localResult,
|
|
|
|
query: searchArgs,
|
|
|
|
};
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
export interface ISeshatSearchResults extends ISearchResults {
    // The local-index query that produced these results; reused (and its
    // next_batch updated) for local pagination.
    seshatQuery?: ISearchArgs;
    // Events already fetched but not yet presented; see combineEvents() for
    // why events are cached between pagination calls.
    cachedEvents?: ISearchResult[];
    // Which source the oldest presented event came from — the next batch is
    // fetched from that source's counterpart window.
    oldestEventFrom?: "local" | "server";
    // Pagination token for the server-side part of a combined search, kept
    // separate from _query since next_batch is a query parameter, not part of
    // the request body.
    serverSideNextBatch?: string;
}
|
|
|
|
|
|
|
|
async function localSearchProcess(searchTerm: string, roomId: string = undefined): Promise<ISeshatSearchResults> {
|
2020-03-20 13:38:43 +03:00
|
|
|
const emptyResult = {
|
|
|
|
results: [],
|
|
|
|
highlights: [],
|
2021-07-10 17:43:46 +03:00
|
|
|
} as ISeshatSearchResults;
|
2020-03-20 13:38:43 +03:00
|
|
|
|
|
|
|
if (searchTerm === "") return emptyResult;
|
|
|
|
|
2020-06-10 11:09:16 +03:00
|
|
|
const result = await localSearch(searchTerm, roomId);
|
2020-06-03 15:16:02 +03:00
|
|
|
|
2020-06-10 11:09:16 +03:00
|
|
|
emptyResult.seshatQuery = result.query;
|
2020-06-03 15:16:02 +03:00
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
const response: ISearchResponse = {
|
2020-06-10 11:09:16 +03:00
|
|
|
search_categories: {
|
|
|
|
room_events: result.response,
|
|
|
|
},
|
2020-06-04 13:12:09 +03:00
|
|
|
};
|
2020-06-03 15:16:02 +03:00
|
|
|
|
2021-06-02 06:36:28 +03:00
|
|
|
const processedResult = MatrixClientPeg.get().processRoomEventsSearch(emptyResult, response);
|
2020-06-10 15:25:30 +03:00
|
|
|
// Restore our encryption info so we can properly re-verify the events.
|
|
|
|
restoreEncryptionInfo(processedResult.results);
|
|
|
|
|
|
|
|
return processedResult;
|
2020-05-29 12:44:08 +03:00
|
|
|
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
async function localPagination(searchResult: ISeshatSearchResults): Promise<ISeshatSearchResults> {
|
2020-05-29 12:44:08 +03:00
|
|
|
const eventIndex = EventIndexPeg.get();
|
|
|
|
|
2020-05-29 17:32:57 +03:00
|
|
|
const searchArgs = searchResult.seshatQuery;
|
2020-05-29 12:44:08 +03:00
|
|
|
|
|
|
|
const localResult = await eventIndex.search(searchArgs);
|
2020-05-29 17:32:57 +03:00
|
|
|
searchResult.seshatQuery.next_batch = localResult.next_batch;
|
2020-05-29 12:44:08 +03:00
|
|
|
|
2020-06-10 16:41:55 +03:00
|
|
|
// We only need to restore the encryption state for the new results, so
|
|
|
|
// remember how many of them we got.
|
|
|
|
const newResultCount = localResult.results.length;
|
|
|
|
|
2020-05-29 12:44:08 +03:00
|
|
|
const response = {
|
|
|
|
search_categories: {
|
|
|
|
room_events: localResult,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
2021-06-02 06:36:28 +03:00
|
|
|
const result = MatrixClientPeg.get().processRoomEventsSearch(searchResult, response);
|
2020-06-10 16:41:55 +03:00
|
|
|
|
|
|
|
// Restore our encryption info so we can properly re-verify the events.
|
|
|
|
const newSlice = result.results.slice(Math.max(result.results.length - newResultCount, 0));
|
|
|
|
restoreEncryptionInfo(newSlice);
|
|
|
|
|
2020-05-29 12:44:08 +03:00
|
|
|
searchResult.pendingRequest = null;
|
2019-11-13 12:10:35 +03:00
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2021-07-10 18:00:04 +03:00
|
|
|
function compareOldestEvents(firstResults: ISearchResult[], secondResults: ISearchResult[]): number {
|
2020-06-04 16:25:02 +03:00
|
|
|
try {
|
2021-07-10 18:00:04 +03:00
|
|
|
const oldestFirstEvent = firstResults[firstResults.length - 1].result;
|
|
|
|
const oldestSecondEvent = secondResults[secondResults.length - 1].result;
|
2020-06-04 12:18:53 +03:00
|
|
|
|
2020-06-04 16:25:02 +03:00
|
|
|
if (oldestFirstEvent.origin_server_ts <= oldestSecondEvent.origin_server_ts) {
|
2020-06-04 16:51:06 +03:00
|
|
|
return -1;
|
2020-06-04 16:25:02 +03:00
|
|
|
} else {
|
2020-06-04 16:51:06 +03:00
|
|
|
return 1;
|
2020-06-04 16:25:02 +03:00
|
|
|
}
|
|
|
|
} catch {
|
2020-06-04 16:51:06 +03:00
|
|
|
return 0;
|
2020-06-04 12:18:53 +03:00
|
|
|
}
|
2020-06-04 16:25:02 +03:00
|
|
|
}
|
2020-06-04 12:18:53 +03:00
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
function combineEventSources(
|
|
|
|
previousSearchResult: ISeshatSearchResults,
|
|
|
|
response: IResultRoomEvents,
|
|
|
|
a: ISearchResult[],
|
|
|
|
b: ISearchResult[],
|
|
|
|
): void {
|
2020-06-04 17:57:28 +03:00
|
|
|
// Merge event sources and sort the events.
|
2020-06-04 16:25:02 +03:00
|
|
|
const combinedEvents = a.concat(b).sort(compareEvents);
|
2020-06-04 17:57:28 +03:00
|
|
|
// Put half of the events in the response, and cache the other half.
|
2020-06-04 16:25:02 +03:00
|
|
|
response.results = combinedEvents.slice(0, SEARCH_LIMIT);
|
|
|
|
previousSearchResult.cachedEvents = combinedEvents.slice(SEARCH_LIMIT);
|
|
|
|
}
|
2020-06-04 12:18:53 +03:00
|
|
|
|
2020-06-04 16:51:06 +03:00
|
|
|
/**
|
|
|
|
* Combine the events from our event sources into a sorted result
|
|
|
|
*
|
2020-06-04 17:57:28 +03:00
|
|
|
* This method will first be called from the combinedSearch() method. In this
|
|
|
|
* case we will fetch SEARCH_LIMIT events from the server and the local index.
|
|
|
|
*
|
|
|
|
* The method will put the SEARCH_LIMIT newest events from the server and the
|
|
|
|
* local index in the results part of the response, the rest will be put in the
|
|
|
|
* cachedEvents field of the previousSearchResult (in this case an empty search
|
|
|
|
* result).
|
|
|
|
*
|
|
|
|
* Every subsequent call will be made from the combinedPagination() method, in
|
|
|
|
* this case we will combine the cachedEvents and the next SEARCH_LIMIT events
|
|
|
|
* from either the server or the local index.
|
|
|
|
*
|
|
|
|
* Since we have two event sources and we need to sort the results by date we
|
|
|
|
 * need to keep looking for the oldest event. We are implementing a variation of
|
|
|
|
* a sliding window.
|
|
|
|
*
|
2020-06-10 11:44:38 +03:00
|
|
|
* The event sources are here represented as two sorted lists where the smallest
|
|
|
|
* number represents the newest event. The two lists need to be merged in a way
|
|
|
|
* that preserves the sorted property so they can be shown as one search result.
|
|
|
|
* We first fetch SEARCH_LIMIT events from both sources.
|
2020-06-10 11:34:11 +03:00
|
|
|
*
|
2020-06-04 17:57:28 +03:00
|
|
|
* If we set SEARCH_LIMIT to 3:
|
|
|
|
*
|
|
|
|
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
|
|
|
|
* |01, 02, 04|
|
|
|
|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
|
|
|
|
* |03, 05, 09|
|
|
|
|
*
|
|
|
|
* We note that the oldest event is from the local index, and we combine the
|
|
|
|
* results:
|
|
|
|
*
|
|
|
|
* Server window [01, 02, 04]
|
|
|
|
* Local window [03, 05, 09]
|
|
|
|
*
|
|
|
|
* Combined events [01, 02, 03, 04, 05, 09]
|
|
|
|
*
|
|
|
|
* We split the combined result in the part that we want to present and a part
|
|
|
|
* that will be cached.
|
|
|
|
*
|
|
|
|
* Presented events [01, 02, 03]
|
|
|
|
* Cached events [04, 05, 09]
|
|
|
|
*
|
|
|
|
* We slide the window for the server since the oldest event is from the local
|
|
|
|
* index.
|
|
|
|
*
|
|
|
|
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
|
|
|
|
* |06, 07, 08|
|
|
|
|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
|
|
|
|
* |XX, XX, XX|
|
|
|
|
* Cached events [04, 05, 09]
|
|
|
|
*
|
|
|
|
* We note that the oldest event is from the server and we combine the new
|
|
|
|
* server events with the cached ones.
|
|
|
|
*
|
|
|
|
* Cached events [04, 05, 09]
|
|
|
|
* Server events [06, 07, 08]
|
|
|
|
*
|
|
|
|
* Combined events [04, 05, 06, 07, 08, 09]
|
|
|
|
*
|
|
|
|
* We split again.
|
|
|
|
*
|
|
|
|
* Presented events [04, 05, 06]
|
|
|
|
* Cached events [07, 08, 09]
|
|
|
|
*
|
|
|
|
* We slide the local window, the oldest event is on the server.
|
|
|
|
*
|
|
|
|
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
|
|
|
|
* |XX, XX, XX|
|
|
|
|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
|
|
|
|
* |10, 12, 14|
|
|
|
|
*
|
|
|
|
* Cached events [07, 08, 09]
|
|
|
|
* Local events [10, 12, 14]
|
|
|
|
* Combined events [07, 08, 09, 10, 12, 14]
|
|
|
|
*
|
|
|
|
* Presented events [07, 08, 09]
|
|
|
|
* Cached events [10, 12, 14]
|
|
|
|
*
|
|
|
|
* Next up we slide the server window again.
|
|
|
|
*
|
|
|
|
* Server events [01, 02, 04, 06, 07, 08, 11, 13]
|
|
|
|
* |11, 13|
|
|
|
|
* Local events [03, 05, 09, 10, 12, 14, 15, 16]
|
|
|
|
* |XX, XX, XX|
|
|
|
|
*
|
|
|
|
* Cached events [10, 12, 14]
|
|
|
|
* Server events [11, 13]
|
|
|
|
* Combined events [10, 11, 12, 13, 14]
|
|
|
|
*
|
|
|
|
* Presented events [10, 11, 12]
|
|
|
|
* Cached events [13, 14]
|
|
|
|
*
|
|
|
|
* We have one source exhausted, we fetch the rest of our events from the other
|
|
|
|
* source and combine it with our cached events.
|
|
|
|
*
|
|
|
|
*
|
2020-06-04 16:51:06 +03:00
|
|
|
* @param {object} previousSearchResult A search result from a previous search
|
|
|
|
* call.
|
|
|
|
* @param {object} localEvents An unprocessed search result from the event
|
|
|
|
* index.
|
|
|
|
* @param {object} serverEvents An unprocessed search result from the server.
|
|
|
|
*
|
2020-06-04 17:57:28 +03:00
|
|
|
* @return {object} A response object that combines the events from the
|
2020-06-04 16:51:06 +03:00
|
|
|
* different event sources.
|
|
|
|
*
|
|
|
|
*/
|
2021-07-10 17:43:46 +03:00
|
|
|
function combineEvents(
    previousSearchResult: ISeshatSearchResults,
    localEvents: IResultRoomEvents = undefined,
    serverEvents: IResultRoomEvents = undefined,
): IResultRoomEvents {
    const response = {} as IResultRoomEvents;

    const cachedEvents = previousSearchResult.cachedEvents;
    let oldestEventFrom = previousSearchResult.oldestEventFrom;
    // Carry the previous highlights forward; overwritten below on the first
    // call when both sources contribute highlights.
    response.highlights = previousSearchResult.highlights;

    if (localEvents && serverEvents && serverEvents.results) {
        // This is a first search call, combine the events from the server and
        // the local index. Note where our oldest event came from, we shall
        // fetch the next batch of events from the other source.
        if (compareOldestEvents(localEvents.results, serverEvents.results) < 0) {
            oldestEventFrom = "local";
        }

        combineEventSources(previousSearchResult, response, localEvents.results, serverEvents.results);
        response.highlights = localEvents.highlights.concat(serverEvents.highlights);
    } else if (localEvents) {
        // This is a pagination call fetching more events from the local index,
        // meaning that our oldest event was on the server.
        // Change the source of the oldest event if our local event is older
        // than the cached one.
        if (compareOldestEvents(localEvents.results, cachedEvents) < 0) {
            oldestEventFrom = "local";
        }
        combineEventSources(previousSearchResult, response, localEvents.results, cachedEvents);
    } else if (serverEvents && serverEvents.results) {
        // This is a pagination call fetching more events from the server,
        // meaning that our oldest event was in the local index.
        // Change the source of the oldest event if our server event is older
        // than the cached one.
        if (compareOldestEvents(serverEvents.results, cachedEvents) < 0) {
            oldestEventFrom = "server";
        }
        combineEventSources(previousSearchResult, response, serverEvents.results, cachedEvents);
    } else {
        // This is a pagination call where we exhausted both of our event
        // sources, let's push the remaining cached events.
        response.results = cachedEvents;
        previousSearchResult.cachedEvents = [];
    }

    // Persist the updated window marker so the next pagination call knows
    // which source to advance.
    previousSearchResult.oldestEventFrom = oldestEventFrom;

    return response;
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Combine the local and server search responses
|
2020-06-04 17:57:28 +03:00
|
|
|
*
|
|
|
|
* @param {object} previousSearchResult A search result from a previous search
|
|
|
|
* call.
|
|
|
|
* @param {object} localEvents An unprocessed search result from the event
|
|
|
|
* index.
|
|
|
|
* @param {object} serverEvents An unprocessed search result from the server.
|
|
|
|
*
|
|
|
|
* @return {object} A response object that combines the events from the
|
|
|
|
* different event sources.
|
2020-06-03 15:30:21 +03:00
|
|
|
*/
|
2021-07-10 17:43:46 +03:00
|
|
|
function combineResponses(
    previousSearchResult: ISeshatSearchResults,
    localEvents: IResultRoomEvents = undefined,
    serverEvents: IResultRoomEvents = undefined,
): IResultRoomEvents {
    // Combine our events first.
    const response = combineEvents(previousSearchResult, localEvents, serverEvents);

    // Our first search will contain counts from both sources, subsequent
    // pagination requests will fetch responses only from one of the sources, so
    // reuse the first count when we're paginating.
    // NOTE(review): on the first call both localEvents and serverEvents are
    // assumed present (see combinedSearch()); if either were missing this sum
    // would be NaN — confirm callers uphold that invariant.
    if (previousSearchResult.count) {
        response.count = previousSearchResult.count;
    } else {
        response.count = localEvents.count + serverEvents.count;
    }

    // Update our next batch tokens for the given search sources.
    if (localEvents) {
        previousSearchResult.seshatQuery.next_batch = localEvents.next_batch;
    }
    if (serverEvents) {
        previousSearchResult.serverSideNextBatch = serverEvents.next_batch;
    }

    // Set the response next batch token to one of the tokens from the sources,
    // this makes sure that if we exhaust one of the sources we continue with
    // the other one.
    if (previousSearchResult.seshatQuery.next_batch) {
        response.next_batch = previousSearchResult.seshatQuery.next_batch;
    } else if (previousSearchResult.serverSideNextBatch) {
        response.next_batch = previousSearchResult.serverSideNextBatch;
    }

    // We collected all search results from the server as well as from Seshat,
    // we still have some events cached that we'll want to display on the next
    // pagination request.
    //
    // Provide a fake next batch token for that case.
    if (!response.next_batch && previousSearchResult.cachedEvents.length > 0) {
        response.next_batch = "cached";
    }

    return response;
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
// Extra fields stashed on a plain event object carrying the encryption info
// needed to turn it back into an encrypted event; consumed (and deleted) by
// restoreEncryptionInfo().
interface IEncryptedSeshatEvent {
    curve25519Key: string;
    ed25519Key: string;
    algorithm: string;
    forwardingCurve25519KeyChain: string[];
}
|
|
|
|
|
|
|
|
function restoreEncryptionInfo(searchResultSlice: SearchResult[] = []): void {
|
2022-11-07 16:45:34 +03:00
|
|
|
for (const result of searchResultSlice) {
|
|
|
|
const timeline = result.context.getTimeline();
|
2020-06-10 15:25:30 +03:00
|
|
|
|
2022-11-07 16:45:34 +03:00
|
|
|
for (const mxEv of timeline) {
|
2021-07-10 17:43:46 +03:00
|
|
|
const ev = mxEv.event as IEncryptedSeshatEvent;
|
|
|
|
|
|
|
|
if (ev.curve25519Key) {
|
|
|
|
mxEv.makeEncrypted(
|
|
|
|
EventType.RoomMessageEncrypted,
|
|
|
|
{ algorithm: ev.algorithm },
|
|
|
|
ev.curve25519Key,
|
|
|
|
ev.ed25519Key,
|
2020-06-10 15:25:30 +03:00
|
|
|
);
|
2021-07-10 17:43:46 +03:00
|
|
|
// @ts-ignore
|
|
|
|
mxEv.forwardingCurve25519KeyChain = ev.forwardingCurve25519KeyChain;
|
2020-06-10 15:25:30 +03:00
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
delete ev.curve25519Key;
|
|
|
|
delete ev.ed25519Key;
|
|
|
|
delete ev.algorithm;
|
|
|
|
delete ev.forwardingCurve25519KeyChain;
|
2020-06-10 15:25:30 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
// Paginate a combined (server + local index) search: advance whichever
// source currently holds the sliding window (see the combineEvents() docs for
// the windowing scheme), merge with cached events, and process the result.
async function combinedPagination(searchResult: ISeshatSearchResults): Promise<ISeshatSearchResults> {
    const eventIndex = EventIndexPeg.get();
    const client = MatrixClientPeg.get();

    const searchArgs = searchResult.seshatQuery;
    const oldestEventFrom = searchResult.oldestEventFrom;

    let localResult: IResultRoomEvents;
    let serverSideResult: ISearchResponse;

    // Fetch events from the local index if we have a token for it and if it's
    // the local indexes turn or the server has exhausted its results.
    if (searchArgs.next_batch && (!searchResult.serverSideNextBatch || oldestEventFrom === "server")) {
        localResult = await eventIndex.search(searchArgs);
    }

    // Fetch events from the server if we have a token for it and if it's the
    // local indexes turn or the local index has exhausted its results.
    if (searchResult.serverSideNextBatch && (oldestEventFrom === "local" || !searchArgs.next_batch)) {
        const body = { body: searchResult._query, next_batch: searchResult.serverSideNextBatch };
        serverSideResult = await client.search(body);
    }

    let serverEvents: IResultRoomEvents;

    if (serverSideResult) {
        serverEvents = serverSideResult.search_categories.room_events;
    }

    // Combine our events.
    const combinedResult = combineResponses(searchResult, localResult, serverEvents);

    const response = {
        search_categories: {
            room_events: combinedResult,
        },
    };

    // Remember how many results we already had so we can tell which entries
    // are new after processing.
    const oldResultCount = searchResult.results ? searchResult.results.length : 0;

    // Let the client process the combined result.
    const result = client.processRoomEventsSearch(searchResult, response);

    // Restore our encryption info so we can properly re-verify the events.
    const newResultCount = result.results.length - oldResultCount;
    const newSlice = result.results.slice(Math.max(result.results.length - newResultCount, 0));
    restoreEncryptionInfo(newSlice);

    // Clear the in-flight marker so the next pagination call can proceed.
    searchResult.pendingRequest = null;

    return result;
}
|
|
|
|
|
2022-11-18 19:40:22 +03:00
|
|
|
function eventIndexSearch(
|
|
|
|
term: string,
|
|
|
|
roomId: string = undefined,
|
|
|
|
abortSignal?: AbortSignal,
|
|
|
|
): Promise<ISearchResults> {
|
2021-07-10 17:43:46 +03:00
|
|
|
let searchPromise: Promise<ISearchResults>;
|
2019-11-12 17:39:26 +03:00
|
|
|
|
|
|
|
if (roomId !== undefined) {
|
|
|
|
if (MatrixClientPeg.get().isRoomEncrypted(roomId)) {
|
|
|
|
// The search is for a single encrypted room, use our local
|
|
|
|
// search method.
|
2020-06-10 11:09:16 +03:00
|
|
|
searchPromise = localSearchProcess(term, roomId);
|
2019-11-12 17:39:26 +03:00
|
|
|
} else {
|
|
|
|
// The search is for a single non-encrypted room, use the
|
|
|
|
// server-side search.
|
2022-11-18 19:40:22 +03:00
|
|
|
searchPromise = serverSideSearchProcess(term, roomId, abortSignal);
|
2019-11-12 17:39:26 +03:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// Search across all rooms, combine a server side search and a
|
|
|
|
// local search.
|
2022-11-18 19:40:22 +03:00
|
|
|
searchPromise = combinedSearch(term, abortSignal);
|
2019-11-12 17:39:26 +03:00
|
|
|
}
|
|
|
|
|
2019-11-13 12:30:38 +03:00
|
|
|
return searchPromise;
|
2019-11-12 17:39:26 +03:00
|
|
|
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
function eventIndexSearchPagination(searchResult: ISeshatSearchResults): Promise<ISeshatSearchResults> {
|
2020-05-29 12:44:08 +03:00
|
|
|
const client = MatrixClientPeg.get();
|
|
|
|
|
2020-05-29 17:32:57 +03:00
|
|
|
const seshatQuery = searchResult.seshatQuery;
|
|
|
|
const serverQuery = searchResult._query;
|
|
|
|
|
|
|
|
if (!seshatQuery) {
|
2020-06-04 16:25:32 +03:00
|
|
|
// This is a search in a non-encrypted room. Do the normal server-side
|
|
|
|
// pagination.
|
2020-05-29 12:44:08 +03:00
|
|
|
return client.backPaginateRoomEventsSearch(searchResult);
|
2020-05-29 17:32:57 +03:00
|
|
|
} else if (!serverQuery) {
|
2020-06-04 16:25:32 +03:00
|
|
|
// This is a search in a encrypted room. Do a local pagination.
|
2020-05-29 17:32:57 +03:00
|
|
|
const promise = localPagination(searchResult);
|
2020-05-29 12:44:08 +03:00
|
|
|
searchResult.pendingRequest = promise;
|
2020-05-29 17:32:57 +03:00
|
|
|
|
2020-05-29 12:44:08 +03:00
|
|
|
return promise;
|
2020-05-29 17:32:57 +03:00
|
|
|
} else {
|
2020-06-04 16:25:32 +03:00
|
|
|
// We have both queries around, this is a search across all rooms so a
|
|
|
|
// combined pagination needs to be done.
|
2020-05-29 17:32:57 +03:00
|
|
|
const promise = combinedPagination(searchResult);
|
|
|
|
searchResult.pendingRequest = promise;
|
|
|
|
|
2020-06-04 13:12:09 +03:00
|
|
|
return promise;
|
2020-05-29 12:44:08 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-07-10 17:43:46 +03:00
|
|
|
export function searchPagination(searchResult: ISearchResults): Promise<ISearchResults> {
|
2020-05-29 12:44:08 +03:00
|
|
|
const eventIndex = EventIndexPeg.get();
|
|
|
|
const client = MatrixClientPeg.get();
|
|
|
|
|
|
|
|
if (searchResult.pendingRequest) return searchResult.pendingRequest;
|
|
|
|
|
|
|
|
if (eventIndex === null) return client.backPaginateRoomEventsSearch(searchResult);
|
|
|
|
else return eventIndexSearchPagination(searchResult);
|
|
|
|
}
|
|
|
|
|
2022-11-18 19:40:22 +03:00
|
|
|
export default function eventSearch(
|
|
|
|
term: string,
|
|
|
|
roomId: string = undefined,
|
|
|
|
abortSignal?: AbortSignal,
|
|
|
|
): Promise<ISearchResults> {
|
2019-11-12 17:39:26 +03:00
|
|
|
const eventIndex = EventIndexPeg.get();
|
|
|
|
|
2022-11-18 19:40:22 +03:00
|
|
|
if (eventIndex === null) {
|
|
|
|
return serverSideSearchProcess(term, roomId, abortSignal);
|
|
|
|
} else {
|
|
|
|
return eventIndexSearch(term, roomId, abortSignal);
|
|
|
|
}
|
2019-11-12 17:39:26 +03:00
|
|
|
}
|