Merge branch 'master' of https://github.com/louislam/uptime-kuma into uptime-badges
commit e103ac8335
@ -1,22 +0,0 @@
name: 'Automatically close stale issues and PRs'
on:
  schedule:
    - cron: '0 0 * * *'
    #Run once a day at midnight

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v4
        with:
          stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
          stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
          close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.'
          close-pr-message: 'This PR was closed because it has been stalled for 7 days with no activity.'
          days-before-stale: 180
          days-before-close: 7
          exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,'
          exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,'
          exempt-issue-assignees: 'louislam'
          exempt-pr-assignees: 'louislam'
@ -1,9 +1,13 @@
{
    "extends": "stylelint-config-standard",
    "customSyntax": "postcss-html",
    "rules": {
        "indentation": 4,
        "no-descending-specificity": null,
        "selector-list-comma-newline-after": null,
        "declaration-empty-line-before": null
        "declaration-empty-line-before": null,
        "alpha-value-notation": "number",
        "color-function-notation": "legacy",
        "shorthand-property-no-redundant-values": null
    }
}
@ -1,11 +1,11 @@
const config = {};

if (process.env.TEST_FRONTEND) {
    config.presets = ["@babel/preset-env"];
    config.presets = [ "@babel/preset-env" ];
}

if (process.env.TEST_BACKEND) {
    config.plugins = ["babel-plugin-rewire"];
    config.plugins = [ "babel-plugin-rewire" ];
}

module.exports = config;
@ -0,0 +1,16 @@
-- You should not modify this file if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD mqtt_topic TEXT;

ALTER TABLE monitor
    ADD mqtt_success_message VARCHAR(255);

ALTER TABLE monitor
    ADD mqtt_username VARCHAR(255);

ALTER TABLE monitor
    ADD mqtt_password VARCHAR(255);

COMMIT;
@ -0,0 +1,7 @@
-- You should not modify this file if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

ALTER TABLE monitor
    ADD expiry_notification BOOLEAN default 1;

COMMIT;
@ -0,0 +1,23 @@
-- You should not modify this file if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

CREATE TABLE proxy (
    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    user_id INT NOT NULL,
    protocol VARCHAR(10) NOT NULL,
    host VARCHAR(255) NOT NULL,
    port SMALLINT NOT NULL,
    auth BOOLEAN NOT NULL,
    username VARCHAR(255) NULL,
    password VARCHAR(255) NULL,
    active BOOLEAN NOT NULL DEFAULT 1,
    'default' BOOLEAN NOT NULL DEFAULT 0,
    created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);

ALTER TABLE monitor ADD COLUMN proxy_id INTEGER REFERENCES proxy(id);

CREATE INDEX proxy_id ON monitor (proxy_id);
CREATE INDEX proxy_user_id ON proxy (user_id);

COMMIT;
@ -0,0 +1,6 @@
-- You should not modify this file if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page ADD footer_text TEXT;
ALTER TABLE status_page ADD custom_css TEXT;
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;
@ -0,0 +1,31 @@
-- You should not modify this file if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;

CREATE TABLE [status_page](
    [id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    [slug] VARCHAR(255) NOT NULL UNIQUE,
    [title] VARCHAR(255) NOT NULL,
    [description] TEXT,
    [icon] VARCHAR(255) NOT NULL,
    [theme] VARCHAR(30) NOT NULL,
    [published] BOOLEAN NOT NULL DEFAULT 1,
    [search_engine_index] BOOLEAN NOT NULL DEFAULT 1,
    [show_tags] BOOLEAN NOT NULL DEFAULT 0,
    [password] VARCHAR,
    [created_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    [modified_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE UNIQUE INDEX [slug] ON [status_page]([slug]);


CREATE TABLE [status_page_cname](
    [id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    [status_page_id] INTEGER NOT NULL REFERENCES [status_page]([id]) ON DELETE CASCADE ON UPDATE CASCADE,
    [domain] VARCHAR NOT NULL UNIQUE
);

ALTER TABLE incident ADD status_page_id INTEGER;
ALTER TABLE [group] ADD status_page_id INTEGER;

COMMIT;
@ -1,8 +1,8 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:14-alpine3.12
FROM node:16-alpine3.12
WORKDIR /app

# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
    pip3 --no-cache-dir install apprise==0.9.6 && \
    pip3 --no-cache-dir install apprise==0.9.8 && \
    rm -rf /root/.cache
@ -1,12 +1,26 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:14-buster-slim
FROM node:16-buster-slim
ARG TARGETPLATFORM

WORKDIR /app

# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
RUN apt update && \
    apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
        sqlite3 iputils-ping util-linux dumb-init && \
    pip3 --no-cache-dir install apprise==0.9.6 && \
    pip3 --no-cache-dir install apprise==0.9.8 && \
    rm -rf /var/lib/apt/lists/*

# Install cloudflared
# dpkg --add-architecture arm: cloudflared does not provide armhf builds; this is a workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
    dpkg --add-architecture arm && \
    apt update && \
    apt --yes --no-install-recommends install ./cloudflared.deb && \
    rm -rf /var/lib/apt/lists/* && \
    rm -f cloudflared.deb
@ -1,6 +1,6 @@
module.exports = {
    apps: [{
        name: "uptime-kuma",
        script: "./server/server.js",
    }]
}
    apps: [{
        name: "uptime-kuma",
        script: "./server/server.js",
    }]
};
@ -0,0 +1,62 @@
const pkg = require("../../package.json");
const fs = require("fs");
const childProcess = require("child_process");
const util = require("../../src/util");

util.polyfill();

const version = process.env.VERSION;

console.log("Beta Version: " + version);

if (!version || !version.includes("-beta.")) {
    console.error("invalid version, beta version only");
    process.exit(1);
}

const exists = tagExists(version);

if (! exists) {
    // Process package.json
    pkg.version = version;
    fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
    commit(version);
    tag(version);

} else {
    console.log("version tag exists, please delete the tag or use another tag");
    process.exit(1);
}

function commit(version) {
    let msg = "Update to " + version;

    let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
    let stdout = res.stdout.toString().trim();
    console.log(stdout);

    if (stdout.includes("no changes added to commit")) {
        throw new Error("commit error");
    }

    res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
    console.log(res.stdout.toString().trim());
}

function tag(version) {
    let res = childProcess.spawnSync("git", [ "tag", version ]);
    console.log(res.stdout.toString().trim());

    res = childProcess.spawnSync("git", [ "push", "origin", version ]);
    console.log(res.stdout.toString().trim());
}

function tagExists(version) {
    if (! version) {
        throw new Error("invalid version");
    }

    let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);

    return res.stdout.toString().trim() === version;
}
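For reference, a minimal invocation sketch (assumed, not part of the diff; the script path and version tag are examples):

// Bump package.json, commit, tag and push a beta release in one step:
//   VERSION=1.2.3-beta.0 node extra/beta/update-version.js
// The script rejects non-beta versions and aborts if the tag already exists.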
@ -0,0 +1,44 @@
@ -0,0 +1,44 @@
//

const http = require("https"); // the release download URL is https, so use the https module
const fs = require("fs");

const platform = process.argv[2];

if (!platform) {
    console.error("No platform??");
    process.exit(1);
}

let arch = null;

if (platform === "linux/amd64") {
    arch = "amd64";
} else if (platform === "linux/arm64") {
    arch = "arm64";
} else if (platform === "linux/arm/v7") {
    arch = "arm";
} else {
    console.error("Invalid platform?? " + platform);
}

const file = fs.createWriteStream("cloudflared.deb");
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");

function get(url) {
    http.get(url, function (res) {
        if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
            console.log("Redirect to " + res.headers.location);
            get(res.headers.location);
        } else if (res.statusCode >= 200 && res.statusCode < 300) {
            res.pipe(file);

            res.on("end", function () {
                console.log("Downloaded");
            });
        } else {
            console.error(res.statusCode);
            process.exit(1);
        }
    });
}
@ -0,0 +1,19 @@
#!/usr/bin/env node

const childProcess = require("child_process");
let env = process.env;

let cmd = process.argv[2];
let args = process.argv.slice(3);
let replacedArgs = [];

for (let arg of args) {
    for (let key in env) {
        arg = arg.replaceAll(`$${key}`, env[key]);
    }
    replacedArgs.push(arg);
}

let child = childProcess.spawn(cmd, replacedArgs);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stderr);
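For reference, a rough usage sketch (assumed, not part of the diff; the command and variable name are examples):

// Expand "$VAR" placeholders from process.env inside the arguments before spawning the command,
// presumably useful where the invoking shell would not expand them itself:
//   APPRISE_VERSION=0.9.8 node extra/env2arg.js echo installing apprise==$APPRISE_VERSION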
@ -0,0 +1,23 @@
const fs = require("fs");
/**
 * Detect if `fs.rmSync` is available
 * to avoid the runtime deprecation warning triggered by using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
 * or the `recursive` option being removed completely in a future Node.js version.
 * See the links below.
 *
 * @todo Once we drop support for Node.js v14 (or at least the versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was added in Node.js v14.14.0, we currently support all Node.js v14 versions including those before v14.14.0, and this function has almost the same signature as `fs.rmSync`.
 * @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
 * @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the documentation of `fs.rmSync`
 * @param {fs.PathLike} path Valid types for path values in "fs".
 * @param {fs.RmDirOptions} [options] Options for `fs.rmdirSync`; if `fs.rmSync` is available and `recursive` is true, `force` is automatically set to `true`.
 */
const rmSync = (path, options) => {
    if (typeof fs.rmSync === "function") {
        if (options.recursive) {
            options.force = true;
        }
        return fs.rmSync(path, options);
    }
    return fs.rmdirSync(path, options);
};
module.exports = rmSync;
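For reference, a minimal usage sketch (assumed, not part of the diff; the require path is an example):

const rmSync = require("./extra/fs-rmSync.js");
// Recursively delete a scratch directory; falls back to fs.rmdirSync on Node.js before v14.14.0
rmSync("./tmp/wiki", { recursive: true });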
@ -0,0 +1,6 @@
console.log("Git Push and Publish the release note on github, then press any key to continue");

process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on("data", process.exit.bind(process, 0));
@ -0,0 +1,50 @@
const { log } = require("../src/util");

const mqttUsername = "louis1";
const mqttPassword = "!@#$LLam";

class SimpleMqttServer {
    aedes = require("aedes")();
    server = require("net").createServer(this.aedes.handle);

    constructor(port) {
        this.port = port;
    }

    start() {
        this.server.listen(this.port, () => {
            console.log("server started and listening on port ", this.port);
        });
    }
}

let server1 = new SimpleMqttServer(10000);

server1.aedes.authenticate = function (client, username, password, callback) {
    if (username && password) {
        console.log(password.toString("utf-8"));
        callback(null, username === mqttUsername && password.toString("utf-8") === mqttPassword);
    } else {
        callback(null, false);
    }
};

server1.aedes.on("subscribe", (subscriptions, client) => {
    console.log(subscriptions);

    for (let s of subscriptions) {
        if (s.topic === "test") {
            server1.aedes.publish({
                topic: "test",
                payload: Buffer.from("ok"),
            }, (error) => {
                if (error) {
                    log.error("mqtt_server", error);
                }
            });
        }
    }

});

server1.start();
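For reference, a rough sketch of exercising this test broker with the `mqtt` client package (assumed, not part of the diff):

const mqtt = require("mqtt");

// Connect with the hard-coded test credentials and subscribe to "test";
// the broker above publishes the payload "ok" when it sees that subscription.
const client = mqtt.connect("mqtt://localhost:10000", {
    username: "louis1",
    password: "!@#$LLam",
});
client.on("connect", () => client.subscribe("test"));
client.on("message", (topic, payload) => console.log(topic, payload.toString()));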
@ -0,0 +1,48 @@
const childProcess = require("child_process");
const fs = require("fs");

const newVersion = process.env.VERSION;

if (!newVersion) {
    console.log("Missing version");
    process.exit(1);
}

updateWiki(newVersion);

function updateWiki(newVersion) {
    const wikiDir = "./tmp/wiki";
    const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";

    safeDelete(wikiDir);

    childProcess.spawnSync("git", [ "clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir ]);
    let content = fs.readFileSync(howToUpdateFilename).toString();

    // Replace the version: https://regex101.com/r/hmj2Bc/1
    content = content.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
    fs.writeFileSync(howToUpdateFilename, content);

    childProcess.spawnSync("git", [ "add", "-A" ], {
        cwd: wikiDir,
    });

    childProcess.spawnSync("git", [ "commit", "-m", `Update to ${newVersion}` ], {
        cwd: wikiDir,
    });

    console.log("Pushing to Github");
    childProcess.spawnSync("git", [ "push" ], {
        cwd: wikiDir,
    });

    safeDelete(wikiDir);
}

function safeDelete(dir) {
    if (fs.existsSync(dir)) {
        fs.rmdirSync(dir, {
            recursive: true,
        });
    }
}
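For reference, a minimal invocation sketch (assumed, not part of the diff; the script path and version are examples):

// Rewrite the "git checkout <version>" line in the wiki's How-to-Update page and push it:
//   VERSION=1.15.0 node extra/update-wiki-version.js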
File diff suppressed because it is too large
@ -0,0 +1,21 @@
const { BeanModel } = require("redbean-node/dist/bean-model");

class Proxy extends BeanModel {
    toJSON() {
        return {
            id: this._id,
            userId: this._user_id,
            protocol: this._protocol,
            host: this._host,
            port: this._port,
            auth: !!this._auth,
            username: this._username,
            password: this._password,
            active: !!this._active,
            default: !!this._default,
            createdDate: this._created_date,
        };
    }
}

module.exports = Proxy;
@ -0,0 +1,132 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");

class StatusPage extends BeanModel {

    static domainMappingList = { };

    /**
     * Return object like this: { "test-uptime.kuma.pet": "default" }
     * @returns {Promise<void>}
     */
    static async loadDomainMappingList() {
        StatusPage.domainMappingList = await R.getAssoc(`
            SELECT domain, slug
            FROM status_page, status_page_cname
            WHERE status_page.id = status_page_cname.status_page_id
        `);
    }

    static async sendStatusPageList(io, socket) {
        let result = {};

        let list = await R.findAll("status_page", " ORDER BY title ");

        for (let item of list) {
            result[item.id] = await item.toJSON();
        }

        io.to(socket.userID).emit("statusPageList", result);
        return list;
    }

    async updateDomainNameList(domainNameList) {

        if (!Array.isArray(domainNameList)) {
            throw new Error("Invalid array");
        }

        let trx = await R.begin();

        await trx.exec("DELETE FROM status_page_cname WHERE status_page_id = ?", [
            this.id,
        ]);

        try {
            for (let domain of domainNameList) {
                if (typeof domain !== "string") {
                    throw new Error("Invalid domain");
                }

                if (domain.trim() === "") {
                    continue;
                }

                // If the domain name is used in another status page, delete it
                await trx.exec("DELETE FROM status_page_cname WHERE domain = ?", [
                    domain,
                ]);

                let mapping = trx.dispense("status_page_cname");
                mapping.status_page_id = this.id;
                mapping.domain = domain;
                await trx.store(mapping);
            }
            await trx.commit();
        } catch (error) {
            await trx.rollback();
            throw error;
        }
    }

    getDomainNameList() {
        let domainList = [];
        for (let domain in StatusPage.domainMappingList) {
            let s = StatusPage.domainMappingList[domain];

            if (this.slug === s) {
                domainList.push(domain);
            }
        }
        return domainList;
    }

    async toJSON() {
        return {
            id: this.id,
            slug: this.slug,
            title: this.title,
            description: this.description,
            icon: this.getIcon(),
            theme: this.theme,
            published: !!this.published,
            showTags: !!this.show_tags,
            domainNameList: this.getDomainNameList(),
            customCSS: this.custom_css,
            footerText: this.footer_text,
            showPoweredBy: !!this.show_powered_by,
        };
    }

    async toPublicJSON() {
        return {
            slug: this.slug,
            title: this.title,
            description: this.description,
            icon: this.getIcon(),
            theme: this.theme,
            published: !!this.published,
            showTags: !!this.show_tags,
            customCSS: this.custom_css,
            footerText: this.footer_text,
            showPoweredBy: !!this.show_powered_by,
        };
    }

    static async slugToID(slug) {
        return await R.getCell("SELECT id FROM status_page WHERE slug = ? ", [
            slug
        ]);
    }

    getIcon() {
        if (!this.icon) {
            return "/icon.svg";
        } else {
            return this.icon;
        }
    }

}

module.exports = StatusPage;
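For reference, a rough usage sketch of the domain-to-slug mapping (assumed, not part of the diff; the host name is an example):

// Load the CNAME mapping once at startup, then resolve an incoming Host header to a status page:
await StatusPage.loadDomainMappingList();
const slug = StatusPage.domainMappingList["status.example.com"]; // e.g. "default"
const statusPageID = await StatusPage.slugToID(slug);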
@ -0,0 +1,67 @@
const NotificationProvider = require("./notification-provider");
const { DOWN, UP } = require("../../src/util");
const axios = require("axios");

class Alerta extends NotificationProvider {

    name = "alerta";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";

        try {
            let alertaUrl = `${notification.alertaApiEndpoint}`;
            let config = {
                headers: {
                    "Content-Type": "application/json;charset=UTF-8",
                    "Authorization": "Key " + notification.alertaApiKey,
                }
            };
            let data = {
                environment: notification.alertaEnvironment,
                severity: "critical",
                correlate: [],
                service: [ "UptimeKuma" ],
                value: "Timeout",
                tags: [ "uptimekuma" ],
                attributes: {},
                origin: "uptimekuma",
                type: "exceptionAlert",
            };

            if (heartbeatJSON == null) {
                let postData = Object.assign({
                    event: "msg",
                    text: msg,
                    group: "uptimekuma-msg",
                    resource: "Message",
                }, data);

                await axios.post(alertaUrl, postData, config);
            } else {
                let datadup = Object.assign( {
                    correlate: [ "service_up", "service_down" ],
                    event: monitorJSON["type"],
                    group: "uptimekuma-" + monitorJSON["type"],
                    resource: monitorJSON["name"],
                }, data );

                if (heartbeatJSON["status"] === DOWN) {
                    datadup.severity = notification.alertaAlertState; // critical
                    datadup.text = "Service " + monitorJSON["type"] + " is down.";
                    await axios.post(alertaUrl, datadup, config);
                } else if (heartbeatJSON["status"] === UP) {
                    datadup.severity = notification.alertaRecoverState; // cleaned
                    datadup.text = "Service " + monitorJSON["type"] + " is up.";
                    await axios.post(alertaUrl, datadup, config);
                }
            }
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }

    }
}

module.exports = Alerta;
@ -0,0 +1,42 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Gorush extends NotificationProvider {

    name = "gorush";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";

        let platformMapping = {
            "ios": 1,
            "android": 2,
            "huawei": 3,
        };

        try {
            let data = {
                "notifications": [
                    {
                        "tokens": [ notification.gorushDeviceToken ],
                        "platform": platformMapping[notification.gorushPlatform],
                        "message": msg,
                        // Optional
                        "title": notification.gorushTitle,
                        "priority": notification.gorushPriority,
                        "retry": parseInt(notification.gorushRetry) || 0,
                        "topic": notification.gorushTopic,
                    }
                ]
            };
            let config = {};

            await axios.post(`${notification.gorushServerURL}/api/push`, data, config);
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Gorush;
@ -0,0 +1,45 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class OneBot extends NotificationProvider {

    name = "OneBot";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        try {
            let httpAddr = notification.httpAddr;
            if (!httpAddr.startsWith("http")) {
                httpAddr = "http://" + httpAddr;
            }
            if (!httpAddr.endsWith("/")) {
                httpAddr += "/";
            }
            let onebotAPIUrl = httpAddr + "send_msg";
            let config = {
                headers: {
                    "Content-Type": "application/json",
                    "Authorization": "Bearer " + notification.accessToken,
                }
            };
            let pushText = "UptimeKuma Alert: " + msg;
            let data = {
                "auto_escape": true,
                "message": pushText,
            };
            if (notification.msgType === "group") {
                data["message_type"] = "group";
                data["group_id"] = notification.recieverId;
            } else {
                data["message_type"] = "private";
                data["user_id"] = notification.recieverId;
            }
            await axios.post(onebotAPIUrl, data, config);
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = OneBot;
@ -0,0 +1,52 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");

class PushDeer extends NotificationProvider {

    name = "PushDeer";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        let pushdeerlink = "https://api2.pushdeer.com/message/push";

        let valid = msg != null && monitorJSON != null && heartbeatJSON != null;

        let title;
        if (valid && heartbeatJSON.status === UP) {
            title = "## Uptime Kuma: " + monitorJSON.name + " up";
        } else if (valid && heartbeatJSON.status === DOWN) {
            title = "## Uptime Kuma: " + monitorJSON.name + " down";
        } else {
            title = "## Uptime Kuma Message";
        }

        let data = {
            "pushkey": notification.pushdeerKey,
            "text": title,
            "desp": msg.replace(/\n/g, "\n\n"),
            "type": "markdown",
        };

        try {
            let res = await axios.post(pushdeerlink, data);

            if ("error" in res.data) {
                let error = res.data.error;
                this.throwGeneralAxiosError(error);
            }
            if (res.data.content.result.length === 0) {
                let error = "Invalid PushDeer key";
                this.throwGeneralAxiosError(error);
            } else if (JSON.parse(res.data.content.result[0]).success !== "ok") {
                let error = "Unknown error";
                this.throwGeneralAxiosError(error);
            }
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = PushDeer;
@ -0,0 +1,23 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class TechulusPush extends NotificationProvider {

    name = "PushByTechulus";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";

        try {
            await axios.post(`https://push.techulus.com/api/v1/notify/${notification.pushAPIKey}`, {
                "title": "Uptime-Kuma",
                "body": msg,
            });
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = TechulusPush;
@ -0,0 +1,189 @@
const { R } = require("redbean-node");
const HttpProxyAgent = require("http-proxy-agent");
const HttpsProxyAgent = require("https-proxy-agent");
const SocksProxyAgent = require("socks-proxy-agent");
const { debug } = require("../src/util");
const { UptimeKumaServer } = require("./uptime-kuma-server");

class Proxy {

    static SUPPORTED_PROXY_PROTOCOLS = [ "http", "https", "socks", "socks5", "socks4" ]

    /**
     * Saves and updates given proxy entity
     *
     * @param proxy
     * @param proxyID
     * @param userID
     * @return {Promise<Bean>}
     */
    static async save(proxy, proxyID, userID) {
        let bean;

        if (proxyID) {
            bean = await R.findOne("proxy", " id = ? AND user_id = ? ", [ proxyID, userID ]);

            if (!bean) {
                throw new Error("proxy not found");
            }

        } else {
            bean = R.dispense("proxy");
        }

        // Make sure given proxy protocol is supported
        if (!this.SUPPORTED_PROXY_PROTOCOLS.includes(proxy.protocol)) {
            throw new Error(`
                Unsupported proxy protocol "${proxy.protocol}".
                Supported protocols are ${this.SUPPORTED_PROXY_PROTOCOLS.join(", ")}.`
            );
        }

        // When the new proxy is set as default, clear the default flag on the old default proxy
        if (proxy.default) {
            await R.exec("UPDATE proxy SET `default` = 0 WHERE `default` = 1");
        }

        bean.user_id = userID;
        bean.protocol = proxy.protocol;
        bean.host = proxy.host;
        bean.port = proxy.port;
        bean.auth = proxy.auth;
        bean.username = proxy.username;
        bean.password = proxy.password;
        bean.active = proxy.active || true;
        bean.default = proxy.default || false;

        await R.store(bean);

        if (proxy.applyExisting) {
            await applyProxyEveryMonitor(bean.id, userID);
        }

        return bean;
    }

    /**
     * Deletes proxy with given id and removes it from monitors
     *
     * @param proxyID
     * @param userID
     * @return {Promise<void>}
     */
    static async delete(proxyID, userID) {
        const bean = await R.findOne("proxy", " id = ? AND user_id = ? ", [ proxyID, userID ]);

        if (!bean) {
            throw new Error("proxy not found");
        }

        // Detach the deleted proxy from any monitors that still reference it
        await R.exec("UPDATE monitor SET proxy_id = null WHERE proxy_id = ?", [ proxyID ]);

        // Delete proxy from list
        await R.trash(bean);
    }

    /**
     * Create HTTP and HTTPS agents related to the given proxy bean object
     *
     * @param proxy proxy bean object
     * @param options http and https agent options
     * @return {{httpAgent: Agent, httpsAgent: Agent}}
     */
    static createAgents(proxy, options) {
        const { httpAgentOptions, httpsAgentOptions } = options || {};
        let agent;
        let httpAgent;
        let httpsAgent;

        const proxyOptions = {
            protocol: proxy.protocol,
            host: proxy.host,
            port: proxy.port,
        };

        if (proxy.auth) {
            proxyOptions.auth = `${proxy.username}:${proxy.password}`;
        }

        debug(`Proxy Options: ${JSON.stringify(proxyOptions)}`);
        debug(`HTTP Agent Options: ${JSON.stringify(httpAgentOptions)}`);
        debug(`HTTPS Agent Options: ${JSON.stringify(httpsAgentOptions)}`);

        switch (proxy.protocol) {
            case "http":
            case "https":
                httpAgent = new HttpProxyAgent({
                    ...httpAgentOptions || {},
                    ...proxyOptions
                });

                httpsAgent = new HttpsProxyAgent({
                    ...httpsAgentOptions || {},
                    ...proxyOptions,
                });
                break;
            case "socks":
            case "socks5":
            case "socks4":
                agent = new SocksProxyAgent({
                    ...httpAgentOptions,
                    ...httpsAgentOptions,
                    ...proxyOptions,
                });

                httpAgent = agent;
                httpsAgent = agent;
                break;

            default: throw new Error(`Unsupported proxy protocol provided. ${proxy.protocol}`);
        }

        return {
            httpAgent,
            httpsAgent
        };
    }

    /**
     * Reload proxy settings for current monitors
     * @returns {Promise<void>}
     */
    static async reloadProxy() {
        const server = UptimeKumaServer.getInstance();

        let updatedList = await R.getAssoc("SELECT id, proxy_id FROM monitor");

        for (let monitorID in server.monitorList) {
            let monitor = server.monitorList[monitorID];

            if (updatedList[monitorID]) {
                monitor.proxy_id = updatedList[monitorID].proxy_id;
            }
        }
    }
}

/**
 * Applies given proxy id to monitors
 *
 * @param proxyID
 * @param userID
 * @return {Promise<void>}
 */
async function applyProxyEveryMonitor(proxyID, userID) {
    // Find all monitors with id and proxy id
    const monitors = await R.getAll("SELECT id, proxy_id FROM monitor WHERE user_id = ?", [ userID ]);

    // Update monitors whose proxy id does not match the given proxy id
    for (const monitor of monitors) {
        if (monitor.proxy_id !== proxyID) {
            await R.exec("UPDATE monitor SET proxy_id = ? WHERE id = ?", [ proxyID, monitor.id ]);
        }
    }
}

module.exports = {
    Proxy,
};
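For reference, a rough sketch of wiring the generated agents into an axios request (assumed, not part of the diff; the bean lookup and option values are examples):

const axios = require("axios");
const { R } = require("redbean-node");
const { Proxy } = require("./proxy");

// Build per-protocol agents from a stored proxy bean and route a request through them;
// "proxy: false" keeps axios from layering its own proxy handling on top of the agents.
const proxyBean = await R.findOne("proxy", " id = ? ", [ 1 ]);
const { httpAgent, httpsAgent } = Proxy.createAgents(proxyBean, {
    httpAgentOptions: { maxSockets: 10 },
    httpsAgentOptions: { maxSockets: 10 },
});
const response = await axios.get("https://example.com", { httpAgent, httpsAgent, proxy: false });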
File diff suppressed because it is too large
@ -0,0 +1,93 @@
const { checkLogin, setSetting, setting, doubleCheckPassword } = require("../util-server");
const { CloudflaredTunnel } = require("node-cloudflared-tunnel");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const io = UptimeKumaServer.getInstance().io;

const prefix = "cloudflared_";
const cloudflared = new CloudflaredTunnel();

cloudflared.change = (running, message) => {
    io.to("cloudflared").emit(prefix + "running", running);
    io.to("cloudflared").emit(prefix + "message", message);
};

cloudflared.error = (errorMessage) => {
    io.to("cloudflared").emit(prefix + "errorMessage", errorMessage);
};

module.exports.cloudflaredSocketHandler = (socket) => {

    socket.on(prefix + "join", async () => {
        try {
            checkLogin(socket);
            socket.join("cloudflared");
            io.to(socket.userID).emit(prefix + "installed", cloudflared.checkInstalled());
            io.to(socket.userID).emit(prefix + "running", cloudflared.running);
            io.to(socket.userID).emit(prefix + "token", await setting("cloudflaredTunnelToken"));
        } catch (error) { }
    });

    socket.on(prefix + "leave", async () => {
        try {
            checkLogin(socket);
            socket.leave("cloudflared");
        } catch (error) { }
    });

    socket.on(prefix + "start", async (token) => {
        try {
            checkLogin(socket);
            if (token && typeof token === "string") {
                await setSetting("cloudflaredTunnelToken", token);
                cloudflared.token = token;
            } else {
                cloudflared.token = null;
            }
            cloudflared.start();
        } catch (error) { }
    });

    socket.on(prefix + "stop", async (currentPassword, callback) => {
        try {
            checkLogin(socket);
            await doubleCheckPassword(socket, currentPassword);
            cloudflared.stop();
        } catch (error) {
            callback({
                ok: false,
                msg: error.message,
            });
        }
    });

    socket.on(prefix + "removeToken", async () => {
        try {
            checkLogin(socket);
            await setSetting("cloudflaredTunnelToken", "");
        } catch (error) { }
    });

};

module.exports.autoStart = async (token) => {
    if (!token) {
        token = await setting("cloudflaredTunnelToken");
    } else {
        // Override the current token via args or env var
        await setSetting("cloudflaredTunnelToken", token);
        console.log("Use cloudflared token from args or env var");
    }

    if (token) {
        console.log("Start cloudflared");
        cloudflared.token = token;
        cloudflared.start();
    }
};

module.exports.stop = async () => {
    console.log("Stop cloudflared");
    if (cloudflared) {
        cloudflared.stop();
    }
};
@ -0,0 +1,54 @@
const { checkLogin } = require("../util-server");
const { Proxy } = require("../proxy");
const { sendProxyList } = require("../client");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const server = UptimeKumaServer.getInstance();

module.exports.proxySocketHandler = (socket) => {
    socket.on("addProxy", async (proxy, proxyID, callback) => {
        try {
            checkLogin(socket);

            const proxyBean = await Proxy.save(proxy, proxyID, socket.userID);
            await sendProxyList(socket);

            if (proxy.applyExisting) {
                await Proxy.reloadProxy();
                await server.sendMonitorList(socket);
            }

            callback({
                ok: true,
                msg: "Saved",
                id: proxyBean.id,
            });

        } catch (e) {
            callback({
                ok: false,
                msg: e.message,
            });
        }
    });

    socket.on("deleteProxy", async (proxyID, callback) => {
        try {
            checkLogin(socket);

            await Proxy.delete(proxyID, socket.userID);
            await sendProxyList(socket);
            await Proxy.reloadProxy();

            callback({
                ok: true,
                msg: "Deleted",
            });

        } catch (e) {
            callback({
                ok: false,
                msg: e.message,
            });
        }
    });
};
@ -0,0 +1,90 @@
const express = require("express");
const https = require("https");
const fs = require("fs");
const http = require("http");
const { Server } = require("socket.io");
const { R } = require("redbean-node");
const { log } = require("../src/util");

/**
 * `module.exports` (alias: `server`) should be inside this class, in order to avoid a circular dependency issue.
 * @type {UptimeKumaServer}
 */
class UptimeKumaServer {

    /**
     *
     * @type {UptimeKumaServer}
     */
    static instance = null;

    /**
     * Main monitor list
     * @type {{}}
     */
    monitorList = {};
    entryPage = "dashboard";
    app = undefined;
    httpServer = undefined;
    io = undefined;

    static getInstance(args) {
        if (UptimeKumaServer.instance == null) {
            UptimeKumaServer.instance = new UptimeKumaServer(args);
        }
        return UptimeKumaServer.instance;
    }

    constructor(args) {
        // SSL
        const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
        const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;

        log.info("server", "Creating express and socket.io instance");
        this.app = express();

        if (sslKey && sslCert) {
            log.info("server", "Server Type: HTTPS");
            this.httpServer = https.createServer({
                key: fs.readFileSync(sslKey),
                cert: fs.readFileSync(sslCert)
            }, this.app);
        } else {
            log.info("server", "Server Type: HTTP");
            this.httpServer = http.createServer(this.app);
        }

        this.io = new Server(this.httpServer);
    }

    async sendMonitorList(socket) {
        let list = await this.getMonitorJSONList(socket.userID);
        this.io.to(socket.userID).emit("monitorList", list);
        return list;
    }

    /**
     * Get a list of monitors for the given user.
     * @param {string} userID - The ID of the user to get monitors for.
     * @returns {Promise<Object>} A promise that resolves to an object with monitor IDs as keys and monitor objects as values.
     *
     * Generated by Trelent
     */
    async getMonitorJSONList(userID) {
        let result = {};

        let monitorList = await R.find("monitor", " user_id = ? ORDER BY weight DESC, name", [
            userID,
        ]);

        for (let monitor of monitorList) {
            result[monitor.id] = await monitor.toJSON();
        }

        return result;
    }
}

module.exports = {
    UptimeKumaServer
};
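For reference, a rough bootstrap sketch (assumed, not part of the diff; the port is an example):

const { UptimeKumaServer } = require("./uptime-kuma-server");

// The singleton wires up express + socket.io (HTTPS when ssl-key / ssl-cert are provided).
const server = UptimeKumaServer.getInstance({});
server.httpServer.listen(3001, () => console.log("Listening on port 3001"));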
@ -1,12 +1,12 @@
<template>
    <router-view />
    <router-view />
</template>

<script>
import { setPageLocale } from "./util-frontend";
export default {
    created() {
        setPageLocale();
    },
    created() {
        setPageLocale();
    },
};
</script>
Some files were not shown because too many files have changed in this diff