Merge branch 'master' into 2fa

Commit: 8cd5bad44c
16 changed files with 315 additions and 82 deletions
.github/ISSUE_TEMPLATE/ask-for-help.md (2 changes)

@@ -12,6 +12,8 @@ Please search in Issues without filters: https://github.com/louislam/uptime-kuma
 **Info**
 Uptime Kuma Version:
 Using Docker?: Yes/No
+Docker Version:
+Node.js Version (Without Docker only):
 OS:
 Browser:
 
.github/ISSUE_TEMPLATE/bug_report.md (14 changes)

@@ -25,10 +25,13 @@ A clear and concise description of what you expected to happen.
 
 
 **Info**
-- Uptime Kuma Version:
-- Using Docker?: Yes/No
-- OS:
-- Browser:
+Uptime Kuma Version:
+Using Docker?: Yes/No
+Docker Version:
+Node.js Version (Without Docker only):
+OS:
+Browser:
 
 
 **Screenshots**
 If applicable, add screenshots to help explain your problem.

@@ -36,3 +39,6 @@ If applicable, add screenshots to help explain your problem.
 **Error Log**
 It is easier for us to find out the problem.
 
+Docker: "docker logs <container id>"
+PM2: "~/.pm2/logs/" (e.g. /home/ubuntu/.pm2/logs)
+
db/patch-setting-value-type.sql (new file, 22 lines)

@@ -0,0 +1,22 @@
+-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
+BEGIN TRANSACTION;
+
+-- Generated by Intellij IDEA
+create table setting_dg_tmp
+(
+    id INTEGER
+        primary key autoincrement,
+    key VARCHAR(200) not null
+        unique,
+    value TEXT,
+    type VARCHAR(20)
+);
+
+insert into setting_dg_tmp(id, key, value, type) select id, key, value, type from setting;
+
+drop table setting;
+
+alter table setting_dg_tmp rename to setting;
+
+
+COMMIT;
dockerfile (42 changes)

@@ -1,26 +1,32 @@
+# DON'T UPDATE TO node:14-bullseye-slim, see #372.
+FROM node:14-buster-slim AS build
+WORKDIR /app
+
+# split the sqlite install here, so that it can caches the arm prebuilt
+# do not modify it, since we don't want to re-compile the arm prebuilt again
+RUN apt update && \
+    apt --yes install python3 python3-pip python3-dev git g++ make && \
+    ln -s /usr/bin/python3 /usr/bin/python && \
+    npm install mapbox/node-sqlite3#593c9d --build-from-source
+
+COPY . .
+RUN npm install --legacy-peer-deps && npm run build && npm prune --production
+
 FROM node:14-bullseye-slim AS release
 WORKDIR /app
 
-# install dependencies
-RUN apt update && apt --yes install python3 python3-pip python3-dev git g++ make iputils-ping
-RUN ln -s /usr/bin/python3 /usr/bin/python
-
-# split the sqlite install here, so that it can caches the arm prebuilt
-RUN npm install mapbox/node-sqlite3#593c9d
-
-# Install apprise
-RUN apt --yes install python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib
-RUN pip3 --no-cache-dir install apprise && \
-    rm -rf /root/.cache
-
-# additional package should be added here, since we don't want to re-compile the arm prebuilt again
-
+# Install Apprise,
 # add sqlite3 cli for debugging in the future
-RUN apt --yes install sqlite3
+# iputils-ping for ping
+RUN apt update && \
+    apt --yes install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
+        sqlite3 \
+        iputils-ping && \
+    pip3 --no-cache-dir install apprise && \
+    rm -rf /var/lib/apt/lists/*
 
-COPY . .
-RUN npm install --legacy-peer-deps && npm run build && npm prune
+# Copy app files from build layer
+COPY --from=build /app /app
 
 EXPOSE 3001
 VOLUME ["/app/data"]
dockerfile-alpine

@@ -1,5 +1,5 @@
 # DON'T UPDATE TO alpine3.13, 1.14, see #41.
-FROM node:14-alpine3.12 AS release
+FROM node:14-alpine3.12 AS build
 WORKDIR /app
 
 # split the sqlite install here, so that it can caches the arm prebuilt

@@ -9,13 +9,20 @@ RUN apk add --no-cache --virtual .build-deps make g++ python3 python3-dev git &&
     apk del .build-deps && \
     rm -f /usr/bin/python
 
+COPY . .
+RUN npm install --legacy-peer-deps && npm run build && npm prune --production
+
+
+FROM node:14-alpine3.12 AS release
+WORKDIR /app
+
 # Install apprise
-RUN apk add --no-cache python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib
-RUN pip3 --no-cache-dir install apprise && \
+RUN apk add --no-cache python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
+    pip3 --no-cache-dir install apprise && \
     rm -rf /root/.cache
 
-COPY . .
-RUN npm install --legacy-peer-deps && npm run build && npm prune
+# Copy app files from build layer
+COPY --from=build /app /app
 
 EXPOSE 3001
 VOLUME ["/app/data"]
extra/healthcheck.js

@@ -1,3 +1,6 @@
+/*
+ * This script should be run after a period of time (180s), because the server may need some time to prepare.
+ */
 process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
 
 let client;
Kubernetes deployment README

@@ -1,4 +1,7 @@
 # Uptime-Kuma K8s Deployment
 
+⚠ Warning: K8s deployment is provided by contributors. I have no experience with K8s and I can't fix error in the future. I only test Docker and Node.js. Use at your own risk.
+
 ## How does it work?
 
 Kustomize is a tool which builds a complete deployment file for all config elements.

@@ -25,4 +28,4 @@ This ingressroute.yml is for the [nginx-ingress-controller](https://kubernetes.g
 - run ```kustomize build > apply.yml```
 - run ```kubectl apply -f apply.yml```
 
 Now you should see some k8s magic and Uptime-Kuma should be available at the specified address.
Kubernetes deployment manifest

@@ -30,6 +30,9 @@ spec:
             command:
               - node
               - extra/healthcheck.js
+          initialDelaySeconds: 180
+          periodSeconds: 60
+          timeoutSeconds: 30
         readinessProbe:
           httpGet:
             path: /
package.json (10 changes)

@@ -1,6 +1,6 @@
 {
     "name": "uptime-kuma",
-    "version": "1.5.3",
+    "version": "1.6.0",
     "license": "MIT",
     "repository": {
         "type": "git",

@@ -18,12 +18,12 @@
         "start-server": "node server/server.js",
         "build": "vite build",
         "vite-preview-dist": "vite preview --host",
-        "build-docker": "npm run build-docker-alpine && npm run build-docker-debian",
-        "build-docker-alpine": "docker buildx build -f dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.5.3-alpine --target release . --push",
-        "build-docker-debian": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.5.3 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.5.3-debian --target release . --push",
+        "build-docker": "npm run build-docker-debian && npm run build-docker-alpine",
+        "build-docker-alpine": "docker buildx build -f dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.6.0-alpine --target release . --push",
+        "build-docker-debian": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.6.0 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.6.0-debian --target release . --push",
         "build-docker-nightly": "docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
         "build-docker-nightly-amd64": "docker buildx build --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
-        "setup": "git checkout 1.5.3 && npm install --legacy-peer-deps && node node_modules/esbuild/install.js && npm run build && npm prune",
+        "setup": "git checkout 1.6.0 && npm install --legacy-peer-deps && node node_modules/esbuild/install.js && npm run build && npm prune",
         "update-version": "node extra/update-version.js",
         "mark-as-nightly": "node extra/mark-as-nightly.js",
         "reset-password": "node extra/reset-password.js",
Database class (server)

@@ -1,15 +1,44 @@
 const fs = require("fs");
 const { R } = require("redbean-node");
 const { setSetting, setting } = require("./util-server");
+const { debug, sleep } = require("../src/util");
+const dayjs = require("dayjs");
 
 class Database {
 
-    static templatePath = "./db/kuma.db"
+    static templatePath = "./db/kuma.db";
     static dataDir;
     static path;
 
+    /**
+     * @type {boolean}
+     */
+    static patched = false;
+
+    /**
+     * For Backup only
+     */
+    static backupPath = null;
+
+    /**
+     * Add patch filename in key
+     * Values:
+     *      true: Add it regardless of order
+     *      false: Do nothing
+     *      { parents: []}: Need parents before add it
+     */
+    static patchList = {
+        "patch-setting-value-type.sql": true,
+        "patch-improve-performance.sql": true,
+    }
+
+    /**
+     * The finally version should be 10 after merged tag feature
+     * @deprecated Use patchList for any new feature
+     */
     static latestVersion = 9;
 
     static noReject = true;
-    static sqliteInstance = null;
 
     static async connect() {
         const acquireConnectionTimeout = 120 * 1000;
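
The new patchList field maps each SQL patch file to a rule for applying it. No entry in this commit uses the { parents: [] } form described in the comment, so below is a minimal sketch of how a hypothetical dependent patch could be declared; the filename patch-example-with-parent.sql is made up for illustration.

    // Sketch only: a possible future entry in Database.patchList.
    // "patch-example-with-parent.sql" is a hypothetical filename.
    const patchList = {
        "patch-setting-value-type.sql": true,
        "patch-improve-performance.sql": true,
        "patch-example-with-parent.sql": {
            parents: ["patch-setting-value-type.sql"],   // applied first by patch2Recursion()
        },
    };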
@@ -60,19 +89,7 @@ class Database {
         } else {
             console.info("Database patch is needed")
 
-            console.info("Backup the db")
-            const backupPath = this.dataDir + "kuma.db.bak" + version;
-            fs.copyFileSync(Database.path, backupPath);
-
-            const shmPath = Database.path + "-shm";
-            if (fs.existsSync(shmPath)) {
-                fs.copyFileSync(shmPath, shmPath + ".bak" + version);
-            }
-
-            const walPath = Database.path + "-wal";
-            if (fs.existsSync(walPath)) {
-                fs.copyFileSync(walPath, walPath + ".bak" + version);
-            }
+            this.backup(version);
 
             // Try catch anything here, if gone wrong, restore the backup
             try {
@@ -83,18 +100,92 @@ class Database {
                     console.info(`Patched ${sqlFile}`);
                     await setSetting("database_version", i);
                 }
-                console.log("Database Patched Successfully");
             } catch (ex) {
                 await Database.close();
 
-                console.error("Patch db failed!!! Restoring the backup")
-                fs.copyFileSync(backupPath, Database.path);
-                console.error(ex)
+                this.restore();
 
+                console.error(ex)
                 console.error("Start Uptime-Kuma failed due to patch db failed")
                 console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues")
                 process.exit(1);
             }
         }
 
+        await this.patch2();
+    }
+
+    /**
+     * Call it from patch() only
+     * @returns {Promise<void>}
+     */
+    static async patch2() {
+        console.log("Database Patch 2.0 Process");
+        let databasePatchedFiles = await setting("databasePatchedFiles");
+
+        if (! databasePatchedFiles) {
+            databasePatchedFiles = {};
+        }
+
+        debug("Patched files:");
+        debug(databasePatchedFiles);
+
+        try {
+            for (let sqlFilename in this.patchList) {
+                await this.patch2Recursion(sqlFilename, databasePatchedFiles)
+            }
+
+            if (this.patched) {
+                console.log("Database Patched Successfully");
+            }
+
+        } catch (ex) {
+            await Database.close();
+            this.restore();
+
+            console.error(ex)
+            console.error("Start Uptime-Kuma failed due to patch db failed");
+            console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
+            process.exit(1);
+        }
+
+        await setSetting("databasePatchedFiles", databasePatchedFiles);
+    }
+
+    /**
+     * Used it patch2() only
+     * @param sqlFilename
+     * @param databasePatchedFiles
+     */
+    static async patch2Recursion(sqlFilename, databasePatchedFiles) {
+        let value = this.patchList[sqlFilename];
+
+        if (! value) {
+            console.log(sqlFilename + " skip");
+            return;
+        }
+
+        // Check if patched
+        if (! databasePatchedFiles[sqlFilename]) {
+            console.log(sqlFilename + " is not patched");
+
+            if (value.parents) {
+                console.log(sqlFilename + " need parents");
+                for (let parentSQLFilename of value.parents) {
+                    await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
+                }
+            }
+
+            this.backup(dayjs().format("YYYYMMDDHHmmss"));
+
+            console.log(sqlFilename + " is patching");
+            this.patched = true;
+            await this.importSQLFile("./db/" + sqlFilename);
+            databasePatchedFiles[sqlFilename] = true;
+            console.log(sqlFilename + " is patched successfully");
+
+        } else {
+            console.log(sqlFilename + " is already patched, skip");
+        }
     }
 
     /**
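
patch2() tracks applied patches by filename in the databasePatchedFiles setting instead of bumping a single numeric version. As a rough sketch, assuming only the two patches registered in patchList above, the persisted value after a successful run would look like this:

    // Approximate shape of the "databasePatchedFiles" setting after patch2() finishes;
    // the exact keys depend on which patches exist in Database.patchList.
    const databasePatchedFiles = {
        "patch-setting-value-type.sql": true,
        "patch-improve-performance.sql": true,
    };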
@@ -140,10 +231,96 @@ class Database {
      * @returns {Promise<void>}
      */
     static async close() {
-        if (this.sqliteInstance) {
-            this.sqliteInstance.close();
+        const listener = (reason, p) => {
+            Database.noReject = false;
+        };
+        process.addListener("unhandledRejection", listener);
+
+        console.log("Closing DB");
+
+        while (true) {
+            Database.noReject = true;
+            await R.close();
+            await sleep(2000);
+
+            if (Database.noReject) {
+                break;
+            } else {
+                console.log("Waiting to close the db");
+            }
+        }
+        console.log("SQLite closed");
+
+        process.removeListener("unhandledRejection", listener);
+    }
+
+    /**
+     * One backup one time in this process.
+     * Reset this.backupPath if you want to backup again
+     * @param version
+     */
+    static backup(version) {
+        if (! this.backupPath) {
+            console.info("Backup the db")
+            this.backupPath = this.dataDir + "kuma.db.bak" + version;
+            fs.copyFileSync(Database.path, this.backupPath);
+
+            const shmPath = Database.path + "-shm";
+            if (fs.existsSync(shmPath)) {
+                this.backupShmPath = shmPath + ".bak" + version;
+                fs.copyFileSync(shmPath, this.backupShmPath);
+            }
+
+            const walPath = Database.path + "-wal";
+            if (fs.existsSync(walPath)) {
+                this.backupWalPath = walPath + ".bak" + version;
+                fs.copyFileSync(walPath, this.backupWalPath);
+            }
+        }
+    }
+
+    /**
+     *
+     */
+    static restore() {
+        if (this.backupPath) {
+            console.error("Patch db failed!!! Restoring the backup");
+
+            const shmPath = Database.path + "-shm";
+            const walPath = Database.path + "-wal";
+
+            // Delete patch failed db
+            try {
+                if (fs.existsSync(Database.path)) {
+                    fs.unlinkSync(Database.path);
+                }
+
+                if (fs.existsSync(shmPath)) {
+                    fs.unlinkSync(shmPath);
+                }
+
+                if (fs.existsSync(walPath)) {
+                    fs.unlinkSync(walPath);
+                }
+            } catch (e) {
+                console.log("Restore failed, you may need to restore the backup manually");
+                process.exit(1);
+            }
+
+            // Restore backup
+            fs.copyFileSync(this.backupPath, Database.path);
+
+            if (this.backupShmPath) {
+                fs.copyFileSync(this.backupShmPath, shmPath);
+            }
+
+            if (this.backupWalPath) {
+                fs.copyFileSync(this.backupWalPath, walPath);
+            }
+
+        } else {
+            console.log("Nothing to restore");
         }
-        console.log("Stopped database");
     }
 }
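
The added backup() acts as a one-shot guard: only the first call in a process copies the database files, as its JSDoc notes. A short usage sketch with made-up timestamps:

    // Only the first call copies kuma.db (plus -shm / -wal if present); later calls are
    // no-ops until backupPath is reset, per "One backup one time in this process".
    Database.backup("20210916120000");   // creates kuma.db.bak20210916120000
    Database.backup("20210916123000");   // skipped: this.backupPath is already set
    Database.backupPath = null;          // reset if another backup is wanted
    Database.backup("20210916123000");   // backs up again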
German language strings

@@ -123,7 +123,7 @@ export default {
     enableDefaultNotificationDescription: "Für jeden neuen Monitor wird diese Benachrichtigung standardmäßig aktiviert. Die Benachrichtigung kann weiterhin für jeden Monitor separat deaktiviert werden.",
     Create: "Erstellen",
     "Auto Get": "Auto Get",
-    backupDescription: "Es können alle Monitore und alle Benachrichtigungen in einer JSON-Datei gesichert werden.",
+    backupDescription: "Es können alle Monitore und Benachrichtigungen in einer JSON-Datei gesichert werden.",
     backupDescription2: "PS: Verlaufs- und Ereignisdaten sind nicht enthalten.",
     backupDescription3: "Sensible Daten wie Benachrichtigungstoken sind in der Exportdatei enthalten, bitte bewahre sie sorgfältig auf.",
     alertNoFile: "Bitte wähle eine Datei zum importieren aus.",

@@ -141,5 +141,6 @@ export default {
     Active: "Aktiv",
     Inactive: "Inaktiv",
     Token: "Token",
-    "Show URI": "URI Anzeigen"
+    "Show URI": "URI Anzeigen",
+    "Clear all statistics": "Lösche alle Statistiken"
 }
English language strings

@@ -141,5 +141,6 @@ export default {
     Active: "Active",
     Inactive: "Inactive",
     Token: "Token",
-    "Show URI": "Show URI"
+    "Show URI": "Show URI",
+    "Clear all statistics": "Clear all Statistics"
 }
Simplified Chinese language strings

@@ -13,7 +13,7 @@ export default {
     pauseDashboardHome: "暂停",
     deleteMonitorMsg: "确定要删除此监控吗?",
     deleteNotificationMsg: "确定要删除此消息通知吗?这将对所有监控生效。",
-    resoverserverDescription: "默认服务器 Cloudflare,可以修改为任意你想要使用的DNS服务器",
+    resoverserverDescription: "可自定义要使用的DNS服务器",
     rrtypeDescription: "选择要监控的资源记录类型",
     pauseMonitorMsg: "确定要暂停吗?",
     Settings: "设置",

@@ -109,23 +109,23 @@ export default {
     "Repeat Password": "重复密码",
     respTime: "Resp. Time (ms)",
     notAvailableShort: "N/A",
-    Create: "Create",
-    clearEventsMsg: "Are you sure want to delete all events for this monitor?",
-    clearHeartbeatsMsg: "Are you sure want to delete all heartbeats for this monitor?",
-    confirmClearStatisticsMsg: "Are you sure want to delete ALL statistics?",
-    "Clear Data": "Clear Data",
-    Events: "Events",
-    Heartbeats: "Heartbeats",
-    "Auto Get": "Auto Get",
-    enableDefaultNotificationDescription: "For every new monitor this notification will be enabled by default. You can still disable the notification separately for each monitor.",
-    "Default enabled": "Default enabled",
-    "Also apply to existing monitors": "Also apply to existing monitors",
-    "Import/Export Backup": "Import/Export Backup",
-    Export: "Export",
-    Import: "Import",
-    backupDescription: "You can backup all monitors and all notifications into a JSON file.",
-    backupDescription2: "PS: History and event data is not included.",
-    backupDescription3: "Sensitive data such as notification tokens is included in the export file, please keep it carefully.",
-    alertNoFile: "Please select a file to import.",
-    alertWrongFileType: "Please select a JSON file."
+    Create: "创建",
+    clearEventsMsg: "确定要删除此监控项的所有事件吗?",
+    clearHeartbeatsMsg: "确定要删除此监控项的所有状态吗?",
+    confirmClearStatisticsMsg: "确定要删除所有统计信息吗?",
+    "Clear Data": "清除数据",
+    Events: "事件",
+    Heartbeats: "心跳",
+    "Auto Get": "自动获取",
+    enableDefaultNotificationDescription: "新的监控项将默认启用,你也可以在每个监控项中分别设置",
+    "Default enabled": "默认开启",
+    "Also apply to existing monitors": "应用到所有监控项",
+    "Import/Export Backup": "导入/导出备份",
+    Export: "导出",
+    Import: "导入",
+    backupDescription: "你可以将所有的监控项和消息通知备份到一个 JSON 文件中",
+    backupDescription2: "注意: 不包括历史状态和事件数据",
+    backupDescription3: "导出的文件中可能包含敏感信息,如消息通知的 Token 信息,请小心存放!",
+    alertNoFile: "请选择一个文件导入",
+    alertWrongFileType: "请选择一个 JSON 格式的文件"
 }
Socket mixin (WebSocket connection setup)

@@ -32,12 +32,14 @@ export default {
     created() {
         window.addEventListener("resize", this.onResize);
 
+        let protocol = (location.protocol === "https:") ? "wss://" : "ws://";
+
         let wsHost;
         const env = process.env.NODE_ENV || "production";
         if (env === "development" || localStorage.dev === "dev") {
-            wsHost = ":3001"
+            wsHost = protocol + location.hostname + ":3001";
         } else {
-            wsHost = ""
+            wsHost = protocol + location.host;
         }
 
         socket = io(wsHost, {
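
With this change the client hands socket.io an absolute ws:// or wss:// URL instead of a bare host suffix. A small sketch of how the same logic resolves, using made-up hosts:

    // Sketch only: the wsHost resolution logic extracted into a helper for illustration.
    function resolveWsHost(location, dev) {
        let protocol = (location.protocol === "https:") ? "wss://" : "ws://";
        return dev ? protocol + location.hostname + ":3001" : protocol + location.host;
    }

    resolveWsHost({ protocol: "https:", hostname: "status.example.com", host: "status.example.com" }, false);
    // -> "wss://status.example.com"
    resolveWsHost({ protocol: "http:", hostname: "localhost", host: "localhost:3000" }, true);
    // -> "ws://localhost:3001"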
Settings page (Vue)

@@ -155,7 +155,7 @@
                 <button v-if="settings.disableAuth" class="btn btn-outline-primary me-1" @click="enableAuth">{{ $t("Enable Auth") }}</button>
                 <button v-if="! settings.disableAuth" class="btn btn-primary me-1" @click="confirmDisableAuth">{{ $t("Disable Auth") }}</button>
                 <button v-if="! settings.disableAuth" class="btn btn-danger me-1" @click="$root.logout">{{ $t("Logout") }}</button>
-                <button class="btn btn-outline-danger me-1" @click="confirmClearStatistics">{{ $t("Clear all Statistics") }}</button>
+                <button class="btn btn-outline-danger me-1" @click="confirmClearStatistics">{{ $t("Clear all statistics") }}</button>
             </div>
         </template>
     </div>

@@ -394,7 +394,7 @@ export default {
                 notificationList: this.$root.notificationList,
                 monitorList: monitorList,
             }
-            exportData = JSON.stringify(exportData);
+            exportData = JSON.stringify(exportData, null, 4);
             let downloadItem = document.createElement("a");
             downloadItem.setAttribute("href", "data:application/json;charset=utf-8," + encodeURI(exportData));
             downloadItem.setAttribute("download", fileName);
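
The export change only adds the third argument to JSON.stringify, which pretty-prints the backup file with a 4-space indent. A quick sketch with a made-up object:

    const data = { version: "1.6.0", monitorList: [] };

    JSON.stringify(data);
    // '{"version":"1.6.0","monitorList":[]}'

    JSON.stringify(data, null, 4);
    // '{
    //     "version": "1.6.0",
    //     "monitorList": []
    // }'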