uptime-kuma/server/database.js

const fs = require("fs");
const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
/**
* Database & App Data Folder
*/
class Database {
static templatePath = "./db/kuma.db";
/**
* Data Dir (Default: ./data)
*/
static dataDir;
/**
* User Upload Dir (Default: ./data/upload)
*/
static uploadDir;
static path;
/**
* @type {boolean}
*/
static patched = false;
/**
* For Backup only
*/
static backupPath = null;
/**
* Patch filenames as keys; the value controls how each patch is applied:
* true: Apply it regardless of order
* false: Do nothing
* { parents: [] }: Apply the listed parent patches before this one
*/
static patchList = {
"patch-setting-value-type.sql": true,
"patch-improve-performance.sql": true,
"patch-2fa.sql": true,
"patch-add-retry-interval-monitor.sql": true,
"patch-incident-table.sql": true,
"patch-group-table.sql": true,
"patch-monitor-push_token.sql": true,
"patch-http-monitor-method-body-and-headers.sql": true,
"patch-2fa-invalidate-used-token.sql": true,
"patch-notification_sent_history.sql": true,
"patch-monitor-basic-auth.sql": true,
"patch-add-docker-columns.sql": true,
"patch-status-page.sql": true,
"patch-proxy.sql": true,
"patch-monitor-expiry-notification.sql": true,
"patch-status-page-footer-css.sql": true,
"patch-added-mqtt-monitor.sql": true,
"patch-add-clickable-status-page-link.sql": true,
"patch-add-sqlserver-monitor.sql": true,
"patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] },
"patch-grpc-monitor.sql": true,
"patch-add-radius-monitor.sql": true,
"patch-monitor-add-resend-interval.sql": true,
"patch-maintenance-table2.sql": true,
};
/**
* The final version should be 10 after the tag feature is merged
* @deprecated Use patchList for any new feature
*/
static latestVersion = 10;
static noReject = true;
/**
* Initialize the database
* @param {Object} args Arguments to initialize DB with
*/
static init(args) {
// Data Directory (must end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = Database.dataDir + "upload/";
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
log.info("db", `Data Dir: ${Database.dataDir}`);
}
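// Usage sketch (illustrative, not from this file): the server entry point is expected to
// call init() with parsed CLI arguments before connecting, e.g.
//   Database.init(args);
//   await Database.connect();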
/**
* Connect to the database
* @param {boolean} [testMode=false] Should the connection be
* started in test mode?
* @param {boolean} [autoloadModels=true] Should models be
* automatically loaded?
* @param {boolean} [noLog=false] Should logs not be output?
* @returns {Promise<void>}
*/
static async connect(testMode = false, autoloadModels = true, noLog = false) {
const acquireConnectionTimeout = 120 * 1000;
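// Reuse knex's built-in sqlite3 dialect, but point its driver at the
// @louislam/sqlite3 fork instead of the stock sqlite3 package.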
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
Dialect.prototype._driver = () => require("@louislam/sqlite3");
const knexInstance = knex({
client: Dialect,
connection: {
filename: Database.path,
acquireConnectionTimeout: acquireConnectionTimeout,
},
useNullAsDefault: true,
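// Single-connection pool: SQLite only allows one writer at a time, so sharing one
// connection avoids write-lock contention.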
pool: {
min: 1,
max: 1,
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
});
R.setup(knexInstance);
if (process.env.SQL_LOG === "1") {
R.debug(true);
}
// Auto map the model to a bean object
R.freeze(true);
if (autoloadModels) {
await R.autoloadModels("./server/model");
}
await R.exec("PRAGMA foreign_keys = ON");
if (testMode) {
// Change to MEMORY
await R.exec("PRAGMA journal_mode = MEMORY");
} else {
// Change to WAL
await R.exec("PRAGMA journal_mode = WAL");
}
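// A negative cache_size is interpreted by SQLite as a size in KiB (roughly 12 MB here);
// auto_vacuum = FULL reclaims free pages automatically at each commit.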
await R.exec("PRAGMA cache_size = -12000");
await R.exec("PRAGMA auto_vacuum = FULL");
// This ensures that an operating system crash or power failure will not corrupt the database.
// FULL synchronous is very safe, but it is also slower.
// Read more: https://sqlite.org/pragma.html#pragma_synchronous
await R.exec("PRAGMA synchronous = FULL");
if (!noLog) {
log.info("db", "SQLite config:");
log.info("db", await R.getAll("PRAGMA journal_mode"));
log.info("db", await R.getAll("PRAGMA cache_size"));
log.info("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
}
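// Usage sketch (illustrative, not from this file): tests can open the database with an
// in-memory journal, without model autoloading and without logging, e.g.
//   await Database.connect(true, false, true);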
/** Patch the database */
static async patch() {
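// Three-phase patching: apply the legacy numbered patches (./db/patch{N}.sql) up to
// latestVersion, then the filename-keyed patchList via patch2(), and finally migrate
// the old status page settings into the status_page table.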
let version = parseInt(await setting("database_version"));
if (! version) {
version = 0;
}
log.info("db", "Your database version: " + version);
log.info("db", "Latest database version: " + this.latestVersion);
if (version === this.latestVersion) {
log.info("db", "Database patch not needed");
} else if (version > this.latestVersion) {
log.info("db", "Warning: Database version is newer than expected");
} else {
log.info("db", "Database patch is needed");
try {
this.backup(version);
} catch (e) {
log.error("db", e);
log.error("db", "Unable to create a backup before patching the database. Please make sure you have enough space and permission.");
process.exit(1);
}
// Try/catch everything here; if anything goes wrong, restore the backup
try {
for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/patch${i}.sql`;
log.info("db", `Patching ${sqlFile}`);
await Database.importSQLFile(sqlFile);
log.info("db", `Patched ${sqlFile}`);
await setSetting("database_version", i);
}
} catch (ex) {
await Database.close();
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
}
await this.patch2();
await this.migrateNewStatusPage();
}
/**
* Patch DB using new process
* Call it from patch() only
* @private
* @returns {Promise<void>}
*/
static async patch2() {
log.info("db", "Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
if (! databasePatchedFiles) {
databasePatchedFiles = {};
}
log.debug("db", "Patched files:");
log.debug("db", databasePatchedFiles);
try {
for (let sqlFilename in this.patchList) {
await this.patch2Recursion(sqlFilename, databasePatchedFiles);
}
if (this.patched) {
log.info("db", "Database Patched Successfully");
}
} catch (ex) {
await Database.close();
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
}
await setSetting("databasePatchedFiles", databasePatchedFiles);
}
/**
* Migrate status page value in setting to "status_page" table
* @returns {Promise<void>}
*/
static async migrateNewStatusPage() {
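// Older versions stored the status page as individual rows in the setting table;
// copy them into a single "default" status_page record and re-link existing
// incidents and groups to it.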
// Fix 1.13.0 empty slug bug
await R.exec("UPDATE status_page SET slug = 'empty-slug-recover' WHERE TRIM(slug) = ''");
let title = await setting("title");
if (title) {
console.log("Migrating Status Page");
let statusPageCheck = await R.findOne("status_page", " slug = 'default' ");
if (statusPageCheck !== null) {
console.log("Migrating Status Page - Skip, default slug record already exists");
return;
}
let statusPage = R.dispense("status_page");
statusPage.slug = "default";
statusPage.title = title;
statusPage.description = await setting("description");
statusPage.icon = await setting("icon");
statusPage.theme = await setting("statusPageTheme");
statusPage.published = !!await setting("statusPagePublished");
statusPage.search_engine_index = !!await setting("searchEngineIndex");
statusPage.show_tags = !!await setting("statusPageTags");
statusPage.password = null;
if (!statusPage.title) {
statusPage.title = "My Status Page";
}
if (!statusPage.icon) {
statusPage.icon = "";
}
if (!statusPage.theme) {
statusPage.theme = "light";
}
let id = await R.store(statusPage);
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("DELETE FROM setting WHERE type = 'statusPage'");
// Migrate the entry page if it is set to the status page
let entryPage = await setting("entryPage");
if (entryPage === "statusPage") {
await setSetting("entryPage", "statusPage-default", "general");
}
console.log("Migrating Status Page - Done");
}
}
/**
* Patch database using the new patching process
* Call it from patch2() only
* @private
* @param sqlFilename Name of the SQL patch file to apply
* @param databasePatchedFiles Record of patch files that have already been applied
* @returns {Promise<void>}
*/
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
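// Recursively apply any parent patches declared in patchList before this one.
// backup() is called with a timestamp before importing, although only the first
// call in a process actually copies the files.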
let value = this.patchList[sqlFilename];
if (! value) {
log.info("db", sqlFilename + " skip");
return;
}
// Check if patched
if (! databasePatchedFiles[sqlFilename]) {
log.info("db", sqlFilename + " is not patched");
if (value.parents) {
log.info("db", sqlFilename + " need parents");
for (let parentSQLFilename of value.parents) {
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
}
}
this.backup(dayjs().format("YYYYMMDDHHmmss"));
log.info("db", sqlFilename + " is patching");
this.patched = true;
await this.importSQLFile("./db/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
log.info("db", sqlFilename + " was patched successfully");
} else {
log.debug("db", sqlFilename + " is already patched, skip");
}
}
/**
* Load an SQL file and execute it
* @param filename Filename of SQL file to import
* @returns {Promise<void>}
*/
static async importSQLFile(filename) {
// Sadly, multiple SQL statements are not supported by many sqlite libraries, so I have to implement it myself
await R.getCell("SELECT 1");
let text = fs.readFileSync(filename).toString();
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return ! line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty statements
text = lines.join("\n");
let statements = text.split(";")
.map((statement) => {
return statement.trim();
})
.filter((statement) => {
return statement !== "";
});
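// Note: splitting on ";" assumes patch files never contain a semicolon inside a
// string literal or trigger body.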
for (let statement of statements) {
await R.exec(statement);
}
}
/**
* Acquire a direct connection to the database
* @returns {any}
*/
static getBetterSQLite3Database() {
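// acquireConnection() returns a Promise resolving to the raw driver connection
// (the @louislam/sqlite3 driver configured in connect()).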
return R.knex.client.acquireConnection();
}
/**
* Special handling, because tarn.js throws a promise rejection that cannot be caught
* @returns {Promise<void>}
*/
static async close() {
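// R.close() can trigger an unhandled rejection from the underlying pool; keep retrying
// until a close completes without one being observed within the 2-second window.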
const listener = (reason, p) => {
Database.noReject = false;
};
process.addListener("unhandledRejection", listener);
log.info("db", "Closing the database");
while (true) {
Database.noReject = true;
await R.close();
await sleep(2000);
if (Database.noReject) {
break;
} else {
log.info("db", "Waiting to close the database");
}
}
log.info("db", "SQLite closed");
process.removeListener("unhandledRejection", listener);
}
/**
* Only one backup is taken per process.
* Reset this.backupPath if you want to back up again
* @param {string} version Version code of backup
*/
static backup(version) {
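// Copy the main database file plus its WAL sidecar files (-shm, -wal) when present,
// so restore() can roll back to a consistent pre-patch state.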
if (! this.backupPath) {
log.info("db", "Backing up the database");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
const shmPath = Database.path + "-shm";
if (fs.existsSync(shmPath)) {
this.backupShmPath = shmPath + ".bak" + version;
fs.copyFileSync(shmPath, this.backupShmPath);
}
const walPath = Database.path + "-wal";
if (fs.existsSync(walPath)) {
this.backupWalPath = walPath + ".bak" + version;
fs.copyFileSync(walPath, this.backupWalPath);
}
// Double-check that all files were actually backed up
if (!fs.existsSync(this.backupPath)) {
throw new Error("Backup failed! " + this.backupPath);
}
if (fs.existsSync(shmPath)) {
if (!fs.existsSync(this.backupShmPath)) {
throw new Error("Backup failed! " + this.backupShmPath);
}
}
if (fs.existsSync(walPath)) {
if (!fs.existsSync(this.backupWalPath)) {
throw new Error("Backup failed! " + this.backupWalPath);
}
}
}
}
/** Restore from most recent backup */
static restore() {
if (this.backupPath) {
log.error("db", "Patching the database failed!!! Restoring the backup");
const shmPath = Database.path + "-shm";
const walPath = Database.path + "-wal";
// Delete the database files left over from the failed patch
try {
if (fs.existsSync(Database.path)) {
fs.unlinkSync(Database.path);
}
if (fs.existsSync(shmPath)) {
fs.unlinkSync(shmPath);
}
if (fs.existsSync(walPath)) {
fs.unlinkSync(walPath);
}
} catch (e) {
log.error("db", "Restore failed; you may need to restore the backup manually");
process.exit(1);
}
// Restore backup
fs.copyFileSync(this.backupPath, Database.path);
if (this.backupShmPath) {
fs.copyFileSync(this.backupShmPath, shmPath);
}
if (this.backupWalPath) {
fs.copyFileSync(this.backupWalPath, walPath);
}
} else {
log.info("db", "Nothing to restore");
}
}
/** Get the size of the database */
static getSize() {
log.debug("db", "Database.getSize()");
let stats = fs.statSync(Database.path);
log.debug("db", stats);
return stats.size;
}
/**
* Shrink the database
* @returns {Promise<void>}
*/
static async shrink() {
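// VACUUM rebuilds the database file to reclaim free pages; SQLite needs enough free
// disk space for a temporary copy of the database while it runs.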
await R.exec("VACUUM");
}
}
module.exports = Database;