Merge remote-tracking branch 'origin/master' into patch-1_k8s

Commit 18ae6fa6c1 by Louis Lam (pull/2083/head)

@ -31,6 +31,9 @@ tsconfig.json
/tmp
/babel.config.js
/ecosystem.config.js
+/extra/healthcheck.exe
+/extra/healthcheck
### .gitignore content (commented rules are duplicated)

@ -19,3 +19,6 @@ indent_size = 2
[*.vue]
trim_trailing_whitespace = false
+[*.go]
+indent_style = tab

@ -1,4 +1,8 @@
-👉 Delete this line if you have read and agree our pull request rules and guidelines: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
+⚠️⚠️⚠️ Since we do not accept all types of pull requests and do not want to waste your time. Please be sure that you have read pull request rules:
+https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
+Tick the checkbox if you understand [x]:
+- [ ] I have read and understand the pull request rules.
# Description
@ -12,7 +16,6 @@ Please delete any options that are not relevant.
- User interface (UI)
- New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected)
-- Translation update
- Other
- This change requires a documentation update

@ -18,7 +18,7 @@ jobs:
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
-node: [ 14, 16, 17, 18 ]
+node: [ 14, 16, 18, 19 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
@ -66,3 +66,19 @@ jobs:
- run: npm install
- run: npm run build
- run: npm run cy:test
+frontend-unit-tests:
+needs: [ check-linters ]
+runs-on: ubuntu-latest
+steps:
+- run: git config --global core.autocrlf false # Mainly for Windows
+- uses: actions/checkout@v3
+- name: Use Node.js 14
+uses: actions/setup-node@v3
+with:
+node-version: 14
+cache: 'npm'
+- run: npm install
+- run: npm run build
+- run: npm run cy:run:unit
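The new job just chains npm scripts that appear later in this diff; a rough local equivalent (a sketch, assuming Node.js 14 and a clean checkout) is:

```bash
# Reproduce the frontend-unit-tests job locally
npm install
npm run build
npm run cy:run:unit   # Cypress with config/cypress.frontend.config.js
```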

@ -1,8 +1,9 @@
name: 'Automatically close stale issues and PRs'
on:
+workflow_dispatch:
schedule:
-- cron: '0 0 * * *'
+- cron: '0 */6 * * *'
-#Run once a day at midnight
+#Run every 6 hours
jobs:
stale:
@ -10,13 +11,12 @@ jobs:
steps:
- uses: actions/stale@v5
with:
-stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
+stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
-stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
-close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.'
+close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
-close-pr-message: 'This PR was closed because it has been stalled for 7 days with no activity.'
days-before-stale: 90
-days-before-close: 7
+days-before-close: 2
+days-before-pr-stale: 999999999
+days-before-pr-close: 1
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
-exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,feature-request'
exempt-issue-assignees: 'louislam'
-exempt-pr-assignees: 'louislam'
+operations-per-run: 200
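With `workflow_dispatch:` added as a trigger, the stale job can also be started on demand rather than waiting for the cron; one way to do that (assuming the GitHub CLI is installed and authenticated) is:

```bash
# Manually trigger the workflow by its name
gh workflow run "Automatically close stale issues and PRs"
```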

.gitignore

@ -16,3 +16,7 @@ dist-ssr
cypress/videos
cypress/screenshots
+/extra/healthcheck.exe
+/extra/healthcheck
+/extra/healthcheck-armv7

@ -1,6 +1,6 @@
# Project Info
-First of all, thank you everyone who made pull requests for Uptime Kuma, I never thought GitHub Community can be that nice! And also because of this, I also never thought other people actually read my code and edit my code. It is not structured and commented so well, lol. Sorry about that.
+First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json.
@ -27,28 +27,40 @@ The frontend code build into "dist" directory. The server (express.js) exposes t
## Can I create a pull request for Uptime Kuma?
-Yes, you can. However, since I don't want to waste your time, be sure to **create empty draft pull request, so we can discuss first** if it is a large pull request or you don't know it will be merged or not.
+Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not.
-Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
-I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
+Here are some references:
-✅ Accept:
+✅ Usually Accept:
-- Bug/Security fix
-- Translations
+- Bug fix
+- Security fix
- Adding notification providers
+- Adding new language files (You should go to https://weblate.kuma.pet for existing languages)
+- Adding new language keys: `$t("...")`
⚠️ Discussion First
- Large pull requests
- New features
❌ Won't Merge
+- A dedicated pr for translating existing languages (You can now translate on https://weblate.kuma.pet)
- Do not pass auto test
- Any breaking changes
- Duplicated pull request
- Buggy
+- UI/UX is not close to Uptime Kuma
- Existing logic is completely modified or deleted for no reason
- A function that is completely out of scope
+- Convert existing code into other programming languages
+- Unnecessary large code changes (Hard to review, causes code conflicts to other pull requests)
+The above cases cannot cover all situations.
+I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand.
+I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it.
+Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
### Recommended Pull Request Guideline
@ -68,9 +80,9 @@ Before deep into coding, discussion first is preferred. Creating an empty pull r
## Project Styles
-I personally do not like something need to learn so much and need to config so much before you can finally start the app.
+I personally do not like it when something requires so much learning and configuration before you can finally start the app.
-- Easy to install for non-Docker users, no native build dependency is needed (at least for x86_64), no extra config, no extra effort to get it run
+- Easy to install for non-Docker users, no native build dependency is needed (at least for x86_64), no extra config, no extra effort required to get it running
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR`.
- Easy to use
@ -168,16 +180,23 @@ The data and socket logic are in `src/mixins/socket.js`.
## Unit Test
-It is an end-to-end testing. It is using Jest and Puppeteer.
```bash
npm run build
npm test
```
-By default, the Chromium window will be shown up during the test. Specifying `HEADLESS_TEST=1` for terminal environments.
+## Dependencies
+Both frontend and backend share the same package.json. However, the frontend dependencies are eventually not used in the production environment, because it is usually also baked into dist files. So:
+- Frontend dependencies = "devDependencies"
+  - Examples: vue, chart.js
+- Backend dependencies = "dependencies"
+  - Examples: socket.io, sqlite3
+- Development dependencies = "devDependencies"
+  - Examples: eslint, sass
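In practice this split only determines which flag to pass when adding a package; a minimal sketch (the package names below are placeholders, not real additions to the project):

```bash
# Backend / runtime package -> goes into "dependencies"
npm install --save some-backend-lib
# Frontend or tooling package -> goes into "devDependencies" (baked into dist, not shipped)
npm install --save-dev some-frontend-lib
```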
-## Update Dependencies
+### Update Dependencies
Install `ncu`
https://github.com/raineorshine/npm-check-updates
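One possible sequence, using the `ncu-patch` script that already exists in package.json:

```bash
# Install npm-check-updates globally, then bump patch-level versions only
npm install -g npm-check-updates
npm run ncu-patch
```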

@ -1,39 +1,40 @@
# Uptime Kuma
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
-[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam)
+[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/engage/uptime-kuma/">
+<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
+</a>
<div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" />
</div>
-It is a self-hosted monitoring tool like "Uptime Robot".
+Uptime Kuma is an easy-to-use self-hosted monitoring tool.
-<img src="https://uptime.kuma.pet/img/dark.jpg" width="700" alt="" />
+<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
## 🥔 Live Demo
Try it!
-https://demo.uptime.kuma.pet
+- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors))
+- Europe Demo Server: https://demo.uptime-kuma.karimi.dev:27000 (Provided by [@mhkarimi1383](https://github.com/mhkarimi1383))
-It is a temporary live demo, all data will be deleted after 10 minutes. The server is located in Tokyo, so if you live far from there, it may affect your experience. I suggest that you should install and try it out for the best demo experience.
+It is a temporary live demo, all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you should install and try it out for the best demo experience.
-VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollective.com/uptime-kuma)! Thank you so much!
## ⭐ Features
-* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers.
+* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers
-* Fancy, Reactive, Fast UI/UX.
+* Fancy, Reactive, Fast UI/UX
-* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
+* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
-* 20 second intervals.
+* 20 second intervals
-* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
+* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
-* Multiple Status Pages
+* Multiple status pages
-* Map Status Page to Domain
+* Map status pages to specific domains
-* Ping Chart
+* Ping chart
-* Certificate Info
+* Certificate info
-* Proxy Support
+* Proxy support
-* 2FA available
+* 2FA support
## 🔧 How to Install
@ -45,14 +46,14 @@ docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name upti
⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
-Browse to http://localhost:3001 after starting.
+Uptime Kuma is now running on http://localhost:3001
### 💪🏻 Non-Docker
Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [Git](https://git-scm.com/downloads)
-- [pm2](https://pm2.keymetrics.io/) - For run in background
+- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
```bash
# Update your npm to the latest version
@ -74,7 +75,7 @@ pm2 start server/server.js --name uptime-kuma
```
-Browse to http://localhost:3001 after starting.
+Uptime Kuma is now running on http://localhost:3001
More useful PM2 Commands
@ -172,9 +173,9 @@ Check out the latest beta release here: https://github.com/louislam/uptime-kuma/
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Translations
-If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
+If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
### Create Pull Requests
-If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
+If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

@ -2,9 +2,9 @@
## Reporting a Vulnerability
-Please report security issues to uptime@kuma.pet.
+Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
-Do not use the issue tracker or discuss it in the public as it will cause more damage.
+Do not use the public issue tracker or discuss it in the public as it will cause more damage.
## Supported Versions

@ -0,0 +1,28 @@
const { defineConfig } = require("cypress");
module.exports = defineConfig({
projectId: "vyjuem",
e2e: {
experimentalStudio: true,
setupNodeEvents(on, config) {
},
fixturesFolder: "test/cypress/fixtures",
screenshotsFolder: "test/cypress/screenshots",
videosFolder: "test/cypress/videos",
downloadsFolder: "test/cypress/downloads",
supportFile: "test/cypress/support/e2e.js",
baseUrl: "http://localhost:3002",
defaultCommandTimeout: 10000,
pageLoadTimeout: 60000,
viewportWidth: 1920,
viewportHeight: 1080,
specPattern: [
"test/cypress/e2e/setup.cy.js",
"test/cypress/e2e/**/*.js"
],
},
env: {
baseUrl: "http://localhost:3002",
},
});
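This config is consumed by the updated `cy:run` script; with the test server from `cy:test` listening on port 3002 (the `baseUrl` above), the suite can be run headlessly like this:

```bash
# Mirrors the "cy:run" script added in package.json
npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js
```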

@ -0,0 +1,10 @@
const { defineConfig } = require("cypress");
module.exports = defineConfig({
e2e: {
supportFile: false,
specPattern: [
"test/cypress/unit/**/*.js"
],
}
});

@ -1,33 +0,0 @@
const PuppeteerEnvironment = require("jest-environment-puppeteer");
const util = require("util");
class DebugEnv extends PuppeteerEnvironment {
async handleTestEvent(event, state) {
const ignoredEvents = [
"setup",
"add_hook",
"start_describe_definition",
"add_test",
"finish_describe_definition",
"run_start",
"run_describe_start",
"test_start",
"hook_start",
"hook_success",
"test_fn_start",
"test_fn_success",
"test_done",
"run_describe_finish",
"run_finish",
"teardown",
"test_fn_failure",
];
if (!ignoredEvents.includes(event.name)) {
console.log(
new Date().toString() + ` Unhandled event [${event.name}] ` + util.inspect(event)
);
}
}
}
module.exports = DebugEnv;

@ -1,5 +0,0 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/frontend.spec.js",
};

@ -1,20 +0,0 @@
module.exports = {
"launch": {
"dumpio": true,
"slowMo": 500,
"headless": process.env.HEADLESS_TEST || false,
"userDataDir": "./data/test-chrome-profile",
args: [
"--disable-setuid-sandbox",
"--disable-gpu",
"--disable-dev-shm-usage",
"--no-default-browser-check",
"--no-experiments",
"--no-first-run",
"--no-pings",
"--no-sandbox",
"--no-zygote",
"--single-process",
],
}
};

@ -1,12 +0,0 @@
module.exports = {
"verbose": true,
"preset": "jest-puppeteer",
"globals": {
"__DEV__": true
},
"testRegex": "./test/e2e.spec.js",
"testEnvironment": "./config/jest-debug-env.js",
"rootDir": "..",
"testTimeout": 30000,
};

@ -1,15 +0,0 @@
import { defineConfig } from "cypress";
export default defineConfig({
e2e: {
baseUrl: "http://localhost:3002",
defaultCommandTimeout: 10000,
pageLoadTimeout: 60000,
viewportWidth: 1920,
viewportHeight: 1080,
specPattern: ["cypress/e2e/setup.cy.ts", "cypress/e2e/**/*.ts"],
},
env: {
baseUrl: "http://localhost:3002",
},
});

@ -1,24 +0,0 @@
import { actor } from "../support/actors/actor";
import { DEFAULT_USER_DATA } from "../support/const/user-data";
import { DashboardPage } from "../support/pages/dasboard-page";
import { SetupPage } from "../support/pages/setup-page";
describe("user can create a new account on setup page", () => {
before(() => {
cy.visit("/setup");
});
it("user can create new account", () => {
cy.url().should("be.equal", SetupPage.url);
actor.setupTask.fillAndSubmitSetupForm(
DEFAULT_USER_DATA.username,
DEFAULT_USER_DATA.password,
DEFAULT_USER_DATA.password
);
cy.url().should("be.equal", DashboardPage.url);
cy.get('[role="alert"]')
.should("be.visible")
.and("contain.text", "Added Successfully.");
});
});

@ -1,8 +0,0 @@
import { SetupTask } from "../tasks/setup-task";
class Actor {
setupTask: SetupTask = new SetupTask();
}
const actor = new Actor();
export { actor };

@ -1 +0,0 @@
import "./commands";

@ -1,15 +0,0 @@
import { SetupPage } from "../pages/setup-page";
export class SetupTask {
fillAndSubmitSetupForm(
username: string,
password: string,
passwordRepeat: string
) {
cy.get(SetupPage.usernameInput).type(username);
cy.get(SetupPage.passWordInput).type(password);
cy.get(SetupPage.passwordRepeatInput).type(passwordRepeat);
cy.get(SetupPage.submitSetupForm).click();
}
}

@ -0,0 +1,5 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD game VARCHAR(255);
COMMIT

@ -0,0 +1,25 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD grpc_url VARCHAR(255) default null;
ALTER TABLE monitor
ADD grpc_protobuf TEXT default null;
ALTER TABLE monitor
ADD grpc_body TEXT default null;
ALTER TABLE monitor
ADD grpc_metadata TEXT default null;
ALTER TABLE monitor
ADD grpc_method VARCHAR(255) default null;
ALTER TABLE monitor
ADD grpc_service_name VARCHAR(255) default null;
ALTER TABLE monitor
ADD grpc_enable_tls BOOLEAN default 0 not null;
COMMIT;

@ -0,0 +1,83 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
-- Just for someone who tested maintenance before (patch-maintenance-table.sql)
DROP TABLE IF EXISTS maintenance_status_page;
DROP TABLE IF EXISTS monitor_maintenance;
DROP TABLE IF EXISTS maintenance;
DROP TABLE IF EXISTS maintenance_timeslot;
-- maintenance
CREATE TABLE [maintenance] (
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[title] VARCHAR(150) NOT NULL,
[description] TEXT NOT NULL,
[user_id] INTEGER REFERENCES [user]([id]) ON DELETE SET NULL ON UPDATE CASCADE,
[active] BOOLEAN NOT NULL DEFAULT 1,
[strategy] VARCHAR(50) NOT NULL DEFAULT 'single',
[start_date] DATETIME,
[end_date] DATETIME,
[start_time] TIME,
[end_time] TIME,
[weekdays] VARCHAR2(250) DEFAULT '[]',
[days_of_month] TEXT DEFAULT '[]',
[interval_day] INTEGER
);
CREATE INDEX [manual_active] ON [maintenance] (
[strategy],
[active]
);
CREATE INDEX [active] ON [maintenance] ([active]);
CREATE INDEX [maintenance_user_id] ON [maintenance] ([user_id]);
-- maintenance_status_page
CREATE TABLE maintenance_status_page (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
status_page_id INTEGER NOT NULL,
maintenance_id INTEGER NOT NULL,
CONSTRAINT FK_maintenance FOREIGN KEY (maintenance_id) REFERENCES maintenance (id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_status_page FOREIGN KEY (status_page_id) REFERENCES status_page (id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX [status_page_id_index]
ON [maintenance_status_page]([status_page_id]);
CREATE INDEX [maintenance_id_index]
ON [maintenance_status_page]([maintenance_id]);
-- maintenance_timeslot
CREATE TABLE [maintenance_timeslot] (
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[maintenance_id] INTEGER NOT NULL CONSTRAINT [FK_maintenance] REFERENCES [maintenance]([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[start_date] DATETIME NOT NULL,
[end_date] DATETIME,
[generated_next] BOOLEAN DEFAULT 0
);
CREATE INDEX [maintenance_id] ON [maintenance_timeslot] ([maintenance_id] DESC);
CREATE INDEX [active_timeslot_index] ON [maintenance_timeslot] (
[maintenance_id] DESC,
[start_date] DESC,
[end_date] DESC
);
CREATE INDEX [generated_next_index] ON [maintenance_timeslot] ([generated_next]);
-- monitor_maintenance
CREATE TABLE monitor_maintenance (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
monitor_id INTEGER NOT NULL,
maintenance_id INTEGER NOT NULL,
CONSTRAINT FK_maintenance FOREIGN KEY (maintenance_id) REFERENCES maintenance (id) ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT FK_monitor FOREIGN KEY (monitor_id) REFERENCES monitor (id) ON DELETE CASCADE ON UPDATE CASCADE
);
CREATE INDEX [maintenance_id_index2] ON [monitor_maintenance]([maintenance_id]);
CREATE INDEX [monitor_id_index] ON [monitor_maintenance]([monitor_id]);
COMMIT;

@ -0,0 +1,5 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD packet_size INTEGER DEFAULT 56 NOT NULL;
COMMIT;

@ -3,6 +3,6 @@ FROM node:16-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
-RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
+RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
-pip3 --no-cache-dir install apprise==1.0.0 && \
+pip3 --no-cache-dir install apprise==1.2.1 && \
rm -rf /root/.cache

@ -0,0 +1,16 @@
############################################
# Build in Golang
# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
############################################
FROM golang:1.19-buster
WORKDIR /app
ARG TARGETPLATFORM
COPY ./extra/ ./extra/
# Compile healthcheck.go
RUN apt update && \
apt --yes --no-install-recommends install curl && \
curl -sL https://deb.nodesource.com/setup_18.x | bash && \
apt --yes --no-install-recommends install nodejs && \
node ./extra/build-healthcheck.js $TARGETPLATFORM && \
apt --yes remove nodejs
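As the comment in this new dockerfile suggests, the armv7 binary is best cross-compiled on the host before this stage is built; both commands exist as npm scripts in the package.json changes later in this diff:

```bash
# Cross-compile the armv7 healthcheck on the host, then build/push the Go builder image
npm run build-healthcheck-armv7
npm run build-docker-builder-go
```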

@ -10,8 +10,8 @@ WORKDIR /app
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
-sqlite3 iputils-ping util-linux dumb-init && \
+sqlite3 iputils-ping util-linux dumb-init git && \
-pip3 --no-cache-dir install apprise==1.0.0 && \
+pip3 --no-cache-dir install apprise==1.2.1 && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove

@ -1,30 +1,50 @@
+############################################
+# Build in Golang
+# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
+# Check file: builder-go.dockerfile
+############################################
+FROM louislam/uptime-kuma:builder-go AS build_healthcheck
+############################################
+# Build in Node.js
+############################################
FROM louislam/uptime-kuma:base-debian AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
+COPY .npmrc .npmrc
+COPY package.json package.json
+COPY package-lock.json package-lock.json
+RUN npm ci --omit=dev
COPY . .
-RUN npm ci --production && \
-chmod +x /app/extra/entrypoint.sh
+COPY --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
+RUN chmod +x /app/extra/entrypoint.sh
+############################################
+# ⭐ Main Image
+############################################
FROM louislam/uptime-kuma:base-debian AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
-HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
+HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
+############################################
+# Mark as Nightly
+############################################
FROM release AS nightly
RUN npm run mark-as-nightly
+############################################
# Build an image for testing pr
+############################################
FROM louislam/uptime-kuma:base-debian AS pr-test
WORKDIR /app
@ -54,8 +74,9 @@ VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
CMD ["npm", "run", "start-pr-test"]
+############################################
# Upload the artifact to Github
+############################################
FROM louislam/uptime-kuma:base-debian AS upload-artifact
WORKDIR /
RUN apt update && \

@ -3,10 +3,12 @@ WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
+COPY .npmrc .npmrc
+COPY package.json package.json
+COPY package-lock.json package-lock.json
+RUN npm ci --omit=dev
COPY . .
-RUN npm ci --production && \
-chmod +x /app/extra/entrypoint.sh
+RUN chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app

@ -32,6 +32,10 @@ if (! exists) {
process.exit(1);
}
+/**
+ * Commit updated files
+ * @param {string} version Version to update to
+ */
function commit(version) {
let msg = "Update to " + version;
@ -47,6 +51,10 @@ function commit(version) {
console.log(res.stdout.toString().trim());
}
+/**
+ * Create a tag with the specified version
+ * @param {string} version Tag to create
+ */
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
@ -55,6 +63,11 @@ function tag(version) {
console.log(res.stdout.toString().trim());
}
+/**
+ * Check if a tag exists for the specified version
+ * @param {string} version Version to check
+ * @returns {boolean} Does the tag already exist
+ */
function tagExists(version) {
if (! version) {
throw new Error("invalid version");

@ -0,0 +1,27 @@
const childProcess = require("child_process");
const fs = require("fs");
const platform = process.argv[2];
if (!platform) {
console.error("No platform??");
process.exit(1);
}
if (platform === "linux/arm/v7") {
console.log("Arch: armv7");
if (fs.existsSync("./extra/healthcheck-armv7")) {
fs.renameSync("./extra/healthcheck-armv7", "./extra/healthcheck");
console.log("Already built in the host, skip.");
process.exit(0);
} else {
console.log("prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build.");
}
} else {
if (fs.existsSync("./extra/healthcheck-armv7")) {
fs.rmSync("./extra/healthcheck-armv7");
}
}
const output = childProcess.execSync("go build -x -o ./extra/healthcheck ./extra/healthcheck.go").toString("utf8");
console.log(output);
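The dockerfiles call this script with the buildx `$TARGETPLATFORM` value; invoked by hand it looks like this (any other platform string skips the armv7 special case):

```bash
# Use the pre-built armv7 binary if present, otherwise fall back to a (slow) go build
node ./extra/build-healthcheck.js linux/arm/v7
```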

@ -27,7 +27,7 @@ result = childProcess.spawnSync("git", [ "fetch", name, branch ]);
console.log(result.stdout.toString());
console.error(result.stderr.toString());
-result = childProcess.spawnSync("git", [ "checkout", branch, "--force" ]);
+result = childProcess.spawnSync("git", [ "checkout", `${name}/${branch}`, "--force" ]);
console.log(result.stdout.toString());
console.error(result.stderr.toString());

@ -25,6 +25,10 @@ if (platform === "linux/amd64") {
const file = fs.createWriteStream("cloudflared.deb");
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
+/**
+ * Download specified file
+ * @param {string} url URL to request
+ */
function get(url) {
http.get(url, function (res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {

@ -0,0 +1,81 @@
/*
* If changed, have to run `npm run build-docker-builder-go`.
* This script should be run after a period of time (180s), because the server may need some time to prepare.
*/
package main
import (
"crypto/tls"
"io/ioutil"
"log"
"net/http"
"os"
"runtime"
"time"
)
func main() {
isFreeBSD := runtime.GOOS == "freebsd"
// process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
http.DefaultTransport.(*http.Transport).TLSClientConfig = &tls.Config{
InsecureSkipVerify: true,
}
client := http.Client{
Timeout: 28 * time.Second,
}
sslKey := os.Getenv("UPTIME_KUMA_SSL_KEY")
if len(sslKey) == 0 {
sslKey = os.Getenv("SSL_KEY")
}
sslCert := os.Getenv("UPTIME_KUMA_SSL_CERT")
if len(sslCert) == 0 {
sslCert = os.Getenv("SSL_CERT")
}
hostname := os.Getenv("UPTIME_KUMA_HOST")
if len(hostname) == 0 && !isFreeBSD {
hostname = os.Getenv("HOST")
}
if len(hostname) == 0 {
hostname = "127.0.0.1"
}
port := os.Getenv("UPTIME_KUMA_PORT")
if len(port) == 0 {
port = os.Getenv("PORT")
}
if len(port) == 0 {
port = "3001"
}
protocol := ""
if len(sslKey) != 0 && len(sslCert) != 0 {
protocol = "https"
} else {
protocol = "http"
}
url := protocol + "://" + hostname + ":" + port
log.Println("Checking " + url)
resp, err := client.Get(url)
if err != nil {
log.Fatalln(err)
}
defer resp.Body.Close()
_, err = ioutil.ReadAll(resp.Body)
if err != nil {
log.Fatalln(err)
}
log.Printf("Health Check OK [Res Code: %d]\n", resp.StatusCode)
}
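Once compiled, the binary takes no arguments; it reads the same environment variables as the server, for example:

```bash
# All variables are optional; with none set it checks http://127.0.0.1:3001
UPTIME_KUMA_HOST=127.0.0.1 UPTIME_KUMA_PORT=3001 ./extra/healthcheck
```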

@ -1,4 +1,5 @@
/*
+ * Deprecated: Changed to healthcheck.go, it will be deleted in the future.
 * This script should be run after a period of time (180s), because the server may need some time to prepare.
 */
const { FBSD } = require("../server/util-server");

@ -5,7 +5,7 @@ const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
-const newVersion = oldVersion + "-nightly";
+const newVersion = oldVersion + "-nightly-" + util.genSecret(8);
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);

@ -43,6 +43,11 @@ const main = async () => {
console.log("Finished."); console.log("Finished.");
}; };
/**
* Ask question of user
* @param {string} question Question to ask
* @returns {Promise<string>} Users response
*/
function question(question) { function question(question) {
return new Promise((resolve) => { return new Promise((resolve) => {
rl.question(question, (answer) => { rl.question(question, (answer) => {

@ -53,6 +53,11 @@ const main = async () => {
console.log("Finished."); console.log("Finished.");
}; };
/**
* Ask question of user
* @param {string} question Question to ask
* @returns {Promise<string>} Users response
*/
function question(question) { function question(question) {
return new Promise((resolve) => { return new Promise((resolve) => {
rl.question(question, (answer) => { rl.question(question, (answer) => {

@ -135,6 +135,11 @@ server.listen({
udp: 5300
});
+/**
+ * Get human readable request type from request code
+ * @param {number} code Request code to translate
+ * @returns {string} Human readable request type
+ */
function type(code) {
for (let name in Packet.TYPE) {
if (Packet.TYPE[name] === code) {

@ -11,6 +11,7 @@ class SimpleMqttServer {
this.port = port;
}
+/** Start the MQTT server */
start() {
this.server.listen(this.port, () => {
console.log("server started and listening on port ", this.port);

@ -1,51 +1,45 @@
// Need to use ES6 to read language files
import fs from "fs";
-import path from "path";
import util from "util";
import rmSync from "../fs-rmSync.js";
-// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
- * Look ma, it's cp -R.
- * @param {string} src The path to the thing to copy.
- * @param {string} dest The path to the new copy.
+ * Copy across the required language files
+ * Creates a local directory (./languages) and copies the required files
+ * into it.
+ * @param {string} langCode Code of language to update. A file will be
+ * created with this code if one does not already exist
+ * @param {string} baseLang The second base language file to copy. This
+ * will be ignored if set to "en" as en.js is copied by default
 */
-const copyRecursiveSync = function (src, dest) {
-let exists = fs.existsSync(src);
-let stats = exists && fs.statSync(src);
-let isDirectory = exists && stats.isDirectory();
-if (isDirectory) {
-fs.mkdirSync(dest);
-fs.readdirSync(src).forEach(function (childItemName) {
-copyRecursiveSync(path.join(src, childItemName),
-path.join(dest, childItemName));
-});
-} else {
-fs.copyFileSync(src, dest);
-}
-};
-console.log("Arguments:", process.argv);
-const baseLangCode = process.argv[2] || "en";
-console.log("Base Lang: " + baseLangCode);
-if (fs.existsSync("./languages")) {
-rmSync("./languages", { recursive: true });
-}
-copyRecursiveSync("../../src/languages", "./languages");
-const en = (await import("./languages/en.js")).default;
-const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
-const files = fs.readdirSync("./languages");
-console.log("Files:", files);
-for (const file of files) {
-if (! file.endsWith(".js")) {
-console.log("Skipping " + file);
-continue;
-}
+function copyFiles(langCode, baseLang) {
+if (fs.existsSync("./languages")) {
+rmSync("./languages", { recursive: true });
+}
+fs.mkdirSync("./languages");
+if (!fs.existsSync(`../../src/languages/${langCode}.js`)) {
+fs.closeSync(fs.openSync(`./languages/${langCode}.js`, "a"));
+} else {
+fs.copyFileSync(`../../src/languages/${langCode}.js`, `./languages/${langCode}.js`);
+}
+fs.copyFileSync("../../src/languages/en.js", "./languages/en.js");
+if (baseLang !== "en") {
+fs.copyFileSync(`../../src/languages/${baseLang}.js`, `./languages/${baseLang}.js`);
+}
+}
+/**
+ * Update the specified language file
+ * @param {string} langCode Language code to update
+ * @param {string} baseLang Second language to copy keys from
+ */
+async function updateLanguage(langCode, baseLangCode) {
+const en = (await import("./languages/en.js")).default;
+const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
+let file = langCode + ".js";
console.log("Processing " + file);
const lang = await import("./languages/" + file);
@ -83,5 +77,20 @@ for (const file of files) {
fs.writeFileSync(`../../src/languages/${file}`, code);
}
+// Get command line arguments
+const baseLangCode = process.env.npm_config_baselang || "en";
+const langCode = process.env.npm_config_language;
+// We need the file to edit
+if (langCode == null) {
+throw new Error("Argument --language=<code> must be provided");
+}
+console.log("Base Lang: " + baseLangCode);
+console.log("Updating: " + langCode);
+copyFiles(langCode, baseLangCode);
+await updateLanguage(langCode, baseLangCode);
rmSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");

@ -36,10 +36,8 @@ if (! exists) {
}
/**
- * Updates the version number in package.json and commits it to git.
- * @param {string} version - The new version number
- *
- * Generated by Trelent
+ * Commit updated files
+ * @param {string} version Version to update to
 */
function commit(version) {
let msg = "Update to " + version;
@ -53,16 +51,19 @@ function commit(version) {
}
}
+/**
+ * Create a tag with the specified version
+ * @param {string} version Tag to create
+ */
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
}
/**
- * Checks if a given version is already tagged in the git repository.
- * @param {string} version - The version to check for.
- *
- * Generated by Trelent
+ * Check if a tag exists for the specified version
+ * @param {string} version Version to check
+ * @returns {boolean} Does the tag already exist
 */
function tagExists(version) {
if (! version) {

@ -10,6 +10,10 @@ if (!newVersion) {
updateWiki(newVersion);
+/**
+ * Update the wiki with new version number
+ * @param {string} newVersion Version to update to
+ */
function updateWiki(newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
@ -39,6 +43,10 @@ function updateWiki(newVersion) {
safeDelete(wikiDir);
}
+/**
+ * Check if a directory exists and then delete it
+ * @param {string} dir Directory to delete
+ */
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rm(dir, {

package-lock.json

File diff suppressed because it is too large

@ -1,6 +1,6 @@
{
"name": "uptime-kuma",
-"version": "1.18.0",
+"version": "1.19.6",
"license": "MIT",
"repository": {
"type": "git",
@ -23,16 +23,15 @@
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
"build": "vite build --config ./config/vite.config.js",
-"test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --test",
+"test": "node test/prepare-test-server.js && npm run jest-backend",
"test-with-build": "npm run build && npm test",
-"jest": "node test/prepare-jest.js && npm run jest-frontend && npm run jest-backend",
-"jest-frontend": "cross-env TEST_FRONTEND=1 jest --config=./config/jest-frontend.config.js",
-"jest-backend": "cross-env TEST_BACKEND=1 jest --config=./config/jest-backend.config.js",
+"jest-backend": "cross-env TEST_BACKEND=1 jest --runInBand --detectOpenHandles --forceExit --config=./config/jest-backend.config.js",
"tsc": "tsc",
"vite-preview-dist": "vite preview --host --config ./config/vite.config.js",
"build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
+"build-docker-builder-go": "docker buildx build -f docker/builder-go.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:builder-go . --push",
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
@ -40,7 +39,7 @@
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"build-docker-pr-test": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64 -t louislam/uptime-kuma:pr-test --target pr-test . --push",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
-"setup": "git checkout 1.18.0 && npm ci --production && npm run download-dist",
+"setup": "git checkout 1.19.6 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
@ -53,8 +52,7 @@
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
-"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
-"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix",
+"update-language-files": "cd extra/update-language-files && node index.js && cross-env-shell eslint ../../src/languages/$npm_config_language.js --fix",
"ncu-patch": "npm-check-updates -u -t patch",
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
@ -62,52 +60,63 @@
"build-dist-and-restart": "npm run build && npm run start-server-dev",
"start-pr-test": "node extra/checkout-pr.js && npm install && npm run dev",
"cy:test": "node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/ --e2e",
-"cy:run": "npx cypress run --browser chrome --headless"
+"cy:run": "npx cypress run --browser chrome --headless --config-file ./config/cypress.config.js",
+"cy:run:unit": "npx cypress run --browser chrome --headless --config-file ./config/cypress.frontend.config.js",
+"cypress-open": "concurrently -k -r \"node test/prepare-test-server.js && node server/server.js --port=3002 --data-dir=./data/test/\" \"cypress open --config-file ./config/cypress.config.js\"",
+"build-healthcheck-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./extra/healthcheck-armv7 ./extra/healthcheck.go"
},
"dependencies": { "dependencies": {
"@louislam/sqlite3": "~15.0.6", "@grpc/grpc-js": "~1.7.3",
"@louislam/ping": "~0.4.2-mod.1",
"@louislam/sqlite3": "15.1.2",
"args-parser": "~1.3.0", "args-parser": "~1.3.0",
"axios": "~0.27.0", "axios": "~0.27.0",
"axios-ntlm": "^1.3.0", "axios-ntlm": "1.3.0",
"badge-maker": "^3.3.1", "badge-maker": "~3.3.1",
"bcryptjs": "~2.4.3", "bcryptjs": "~2.4.3",
"bree": "~7.1.5", "bree": "~7.1.5",
"cacheable-lookup": "~6.0.4", "cacheable-lookup": "~6.0.4",
"chardet": "^1.3.0", "chardet": "~1.4.0",
"check-password-strength": "^2.0.5", "check-password-strength": "^2.0.5",
"cheerio": "^1.0.0-rc.10", "cheerio": "~1.0.0-rc.12",
"chroma-js": "^2.1.2", "chroma-js": "~2.4.2",
"command-exists": "~1.2.9", "command-exists": "~1.2.9",
"compare-versions": "~3.6.0", "compare-versions": "~3.6.0",
"compression": "^1.7.4", "compression": "~1.7.4",
"dayjs": "^1.11.0", "dayjs": "~1.11.5",
"express": "~4.17.3", "express": "~4.17.3",
"express-basic-auth": "~1.2.1", "express-basic-auth": "~1.2.1",
"express-static-gzip": "^2.1.7", "express-static-gzip": "~2.1.7",
"form-data": "~4.0.0", "form-data": "~4.0.0",
"gamedig": "^4.0.5",
"http-graceful-shutdown": "~3.1.7", "http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "^5.0.0", "http-proxy-agent": "~5.0.0",
"https-proxy-agent": "^5.0.0", "https-proxy-agent": "~5.0.1",
"iconv-lite": "^0.6.3", "iconv-lite": "~0.6.3",
"jsonwebtoken": "~8.5.1", "jsesc": "~3.0.2",
"jwt-decode": "^3.1.2", "jsonwebtoken": "~9.0.0",
"limiter": "^2.1.0", "jwt-decode": "~3.1.2",
"mqtt": "^4.2.8", "limiter": "~2.1.0",
"mssql": "^8.1.0", "mongodb": "~4.13.0",
"mqtt": "~4.3.7",
"mssql": "~8.1.4",
"mysql2": "~2.3.3",
"node-cloudflared-tunnel": "~1.0.9", "node-cloudflared-tunnel": "~1.0.9",
"node-radius-client": "^1.0.0", "node-radius-client": "~1.0.0",
"nodemailer": "~6.6.5", "nodemailer": "~6.6.5",
"notp": "~2.0.3", "notp": "~2.0.3",
"password-hash": "~1.2.2", "password-hash": "~1.2.2",
"pg": "^8.7.3", "pg": "~8.8.0",
"pg-connection-string": "^2.5.0", "pg-connection-string": "~2.5.0",
"prom-client": "~13.2.0", "prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.1", "prometheus-api-metrics": "~3.2.1",
"redbean-node": "0.1.4", "protobufjs": "~7.1.1",
"socket.io": "~4.4.1", "redbean-node": "~0.2.0",
"socket.io-client": "~4.4.1", "redis": "~4.5.1",
"socket.io": "~4.5.3",
"socket.io-client": "~4.5.3",
"socks-proxy-agent": "6.1.1", "socks-proxy-agent": "6.1.1",
"tar": "^6.1.11", "tar": "~6.1.11",
"tcp-ping": "~0.1.1", "tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2" "thirty-two": "~1.0.2"
}, },
@ -124,33 +133,32 @@
"@vitejs/plugin-legacy": "~2.1.0", "@vitejs/plugin-legacy": "~2.1.0",
"@vitejs/plugin-vue": "~3.1.0", "@vitejs/plugin-vue": "~3.1.0",
"@vue/compiler-sfc": "~3.2.36", "@vue/compiler-sfc": "~3.2.36",
"@vuepic/vue-datepicker": "~3.4.8",
"aedes": "^0.46.3", "aedes": "^0.46.3",
"babel-plugin-rewire": "~1.2.0", "babel-plugin-rewire": "~1.2.0",
"bootstrap": "5.1.3", "bootstrap": "5.1.3",
"chart.js": "~3.6.2", "chart.js": "~3.6.2",
"chartjs-adapter-dayjs": "~1.0.0", "chartjs-adapter-dayjs": "~1.0.0",
"concurrently": "^7.1.0", "concurrently": "^7.1.0",
"core-js": "~3.18.3", "core-js": "~3.26.1",
"cross-env": "~7.0.3", "cross-env": "~7.0.3",
"cypress": "^10.1.0", "cypress": "^10.1.0",
"delay": "^5.0.0", "delay": "^5.0.0",
"dns2": "~2.0.1", "dns2": "~2.0.1",
"eslint": "~8.14.0", "eslint": "~8.14.0",
"eslint-plugin-vue": "~8.7.1", "eslint-plugin-vue": "~8.7.1",
"favico.js": "^0.3.10", "favico.js": "~0.3.10",
"jest": "~27.2.5", "jest": "~27.2.5",
"jest-puppeteer": "~6.0.3",
"postcss-html": "~1.5.0", "postcss-html": "~1.5.0",
"postcss-rtlcss": "~3.7.2", "postcss-rtlcss": "~3.7.2",
"postcss-scss": "~4.0.4", "postcss-scss": "~4.0.4",
"prismjs": "^1.27.0", "prismjs": "~1.29.0",
"puppeteer": "~13.1.3",
"qrcode": "~1.5.0", "qrcode": "~1.5.0",
"rollup-plugin-visualizer": "^5.6.0", "rollup-plugin-visualizer": "^5.6.0",
"sass": "~1.42.1", "sass": "~1.42.1",
"stylelint": "~14.7.1", "stylelint": "~14.7.1",
"stylelint-config-standard": "~25.0.0", "stylelint-config-standard": "~25.0.0",
"terser": "^5.15.0", "terser": "~5.15.0",
"timezones-list": "~3.0.1", "timezones-list": "~3.0.1",
"typescript": "~4.4.4", "typescript": "~4.4.4",
"v-pagination-3": "~0.1.7", "v-pagination-3": "~0.1.7",
@ -160,10 +168,10 @@
"vue-chart-3": "3.0.9", "vue-chart-3": "3.0.9",
"vue-confirm-dialog": "~1.0.2", "vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4", "vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.1.9", "vue-i18n": "~9.2.2",
"vue-image-crop-upload": "~3.0.3", "vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2", "vue-multiselect": "~3.0.0-alpha.2",
"vue-prism-editor": "^2.0.0-alpha.2", "vue-prism-editor": "~2.0.0-alpha.2",
"vue-qrcode": "~1.0.0", "vue-qrcode": "~1.0.0",
"vue-router": "~4.0.14", "vue-router": "~4.0.14",
"vue-toastification": "~2.0.0-rc.5", "vue-toastification": "~2.0.0-rc.5",

@ -63,6 +63,12 @@ function myAuthorizer(username, password, callback) {
}); });
} }
/**
* Use basic auth if auth is not disabled
* @param {express.Request} req Express request object
* @param {express.Response} res Express response object
* @param {express.NextFunction} next
*/
exports.basicAuth = async function (req, res, next) { exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({ const middleware = basicAuth({
authorizer: myAuthorizer, authorizer: myAuthorizer,

@ -1,6 +1,8 @@
const https = require("https"); const https = require("https");
const http = require("http"); const http = require("http");
const CacheableLookup = require("cacheable-lookup"); const CacheableLookup = require("cacheable-lookup");
const { Settings } = require("./settings");
const { log } = require("../src/util");
class CacheableDnsHttpAgent { class CacheableDnsHttpAgent {
@ -9,14 +11,36 @@ class CacheableDnsHttpAgent {
static httpAgentList = {}; static httpAgentList = {};
static httpsAgentList = {}; static httpsAgentList = {};
static enable = false;
/** /**
* Register cacheable to global agents * Register/Disable cacheable to global agents
*/ */
static registerGlobalAgent() { static async update() {
this.cacheable.install(http.globalAgent); log.debug("CacheableDnsHttpAgent", "update");
this.cacheable.install(https.globalAgent); let isEnable = await Settings.get("dnsCache");
if (isEnable !== this.enable) {
log.debug("CacheableDnsHttpAgent", "value changed");
if (isEnable) {
log.debug("CacheableDnsHttpAgent", "enable");
this.cacheable.install(http.globalAgent);
this.cacheable.install(https.globalAgent);
} else {
log.debug("CacheableDnsHttpAgent", "disable");
this.cacheable.uninstall(http.globalAgent);
this.cacheable.uninstall(https.globalAgent);
}
}
this.enable = isEnable;
} }
/**
* Attach cacheable to HTTP agent
* @param {http.Agent} agent Agent to install
*/
static install(agent) { static install(agent) {
this.cacheable.install(agent); this.cacheable.install(agent);
} }
@ -26,6 +50,10 @@ class CacheableDnsHttpAgent {
* @return {https.Agent} * @return {https.Agent}
*/ */
static getHttpsAgent(agentOptions) { static getHttpsAgent(agentOptions) {
if (!this.enable) {
return new https.Agent(agentOptions);
}
let key = JSON.stringify(agentOptions); let key = JSON.stringify(agentOptions);
if (!(key in this.httpsAgentList)) { if (!(key in this.httpsAgentList)) {
this.httpsAgentList[key] = new https.Agent(agentOptions); this.httpsAgentList[key] = new https.Agent(agentOptions);
@ -39,6 +67,10 @@ class CacheableDnsHttpAgent {
* @return {https.Agents} * @return {https.Agents}
*/ */
static getHttpAgent(agentOptions) { static getHttpAgent(agentOptions) {
if (!this.enable) {
return new http.Agent(agentOptions);
}
let key = JSON.stringify(agentOptions); let key = JSON.stringify(agentOptions);
if (!(key in this.httpAgentList)) { if (!(key in this.httpAgentList)) {
this.httpAgentList[key] = new http.Agent(agentOptions); this.httpAgentList[key] = new http.Agent(agentOptions);

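A minimal sketch of how the new update() toggle above could be exercised. The require paths are assumptions, and Settings.set() is a hypothetical counterpart to the Settings.get("dnsCache") call shown in the diff; only update() and getHttpsAgent() come from the code above.

const { CacheableDnsHttpAgent } = require("./server/cacheable-dns-http-agent"); // path assumed
const { Settings } = require("./server/settings");                              // path assumed

async function demoDnsCacheToggle() {
    // Hypothetical setter; the diff above only shows Settings.get("dnsCache")
    await Settings.set("dnsCache", true);

    // Installs cacheable-lookup on the global http/https agents when the setting flips to true,
    // and uninstalls it when it flips back to false
    await CacheableDnsHttpAgent.update();

    // With caching enabled the agent is cached per options object; when disabled,
    // getHttpsAgent() simply returns a fresh https.Agent(agentOptions)
    return CacheableDnsHttpAgent.getHttpsAgent({ keepAlive: true });
}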
@ -25,7 +25,7 @@ exports.startInterval = () => {
let checkBeta = await setting("checkBeta"); let checkBeta = await setting("checkBeta");
if (checkBeta && res.data.beta) { if (checkBeta && res.data.beta) {
if (compareVersions.compare(res.data.beta, res.data.beta, ">")) { if (compareVersions.compare(res.data.beta, res.data.slow, ">")) {
exports.latestVersion = res.data.beta; exports.latestVersion = res.data.beta;
return; return;
} }

@ -4,7 +4,8 @@
const { TimeLogger } = require("../src/util"); const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const { UptimeKumaServer } = require("./uptime-kuma-server"); const { UptimeKumaServer } = require("./uptime-kuma-server");
const io = UptimeKumaServer.getInstance().io; const server = UptimeKumaServer.getInstance();
const io = server.io;
const { setting } = require("./util-server"); const { setting } = require("./util-server");
const checkVersion = require("./check-version"); const checkVersion = require("./check-version");
@ -121,7 +122,9 @@ async function sendInfo(socket) {
socket.emit("info", { socket.emit("info", {
version: checkVersion.version, version: checkVersion.version,
latestVersion: checkVersion.latestVersion, latestVersion: checkVersion.latestVersion,
primaryBaseURL: await setting("primaryBaseURL") primaryBaseURL: await setting("primaryBaseURL"),
serverTimezone: await server.getTimezone(),
serverTimezoneOffset: server.getTimezoneOffset(),
}); });
} }

@ -5,6 +5,8 @@ const badgeConstants = {
naColor: "#999", naColor: "#999",
defaultUpColor: "#66c20a", defaultUpColor: "#66c20a",
defaultDownColor: "#c2290a", defaultDownColor: "#c2290a",
defaultPendingColor: "#f8a306",
defaultMaintenanceColor: "#1747f5",
defaultPingColor: "blue", // as defined by badge-maker / shields.io defaultPingColor: "blue", // as defined by badge-maker / shields.io
defaultStyle: "flat", defaultStyle: "flat",
defaultPingValueSuffix: "ms", defaultPingValueSuffix: "ms",

@ -4,6 +4,7 @@ const { setSetting, setting } = require("./util-server");
const { log, sleep } = require("../src/util"); const { log, sleep } = require("../src/util");
const dayjs = require("dayjs"); const dayjs = require("dayjs");
const knex = require("knex"); const knex = require("knex");
const { PluginsManager } = require("./plugins-manager");
/** /**
* Database & App Data Folder * Database & App Data Folder
@ -62,8 +63,12 @@ class Database {
"patch-add-clickable-status-page-link.sql": true, "patch-add-clickable-status-page-link.sql": true,
"patch-add-sqlserver-monitor.sql": true, "patch-add-sqlserver-monitor.sql": true,
"patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] }, "patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] },
"patch-grpc-monitor.sql": true,
"patch-add-radius-monitor.sql": true, "patch-add-radius-monitor.sql": true,
"patch-monitor-add-resend-interval.sql": true, "patch-monitor-add-resend-interval.sql": true,
"patch-ping-packet-size.sql": true,
"patch-maintenance-table2.sql": true,
"patch-add-gamedig-monitor.sql": true,
}; };
/** /**
@ -81,6 +86,13 @@ class Database {
static init(args) { static init(args) {
// Data Directory (must be end with "/") // Data Directory (must be end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/"; Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
// The plugin feature only works if dataDir is the default "./data/"
if (Database.dataDir !== "./data/") {
log.warn("PLUGIN", "Warning: In order to enable plugin feature, you need to use the default data directory: ./data/");
PluginsManager.disable = true;
}
Database.path = Database.dataDir + "kuma.db"; Database.path = Database.dataDir + "kuma.db";
if (! fs.existsSync(Database.dataDir)) { if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true }); fs.mkdirSync(Database.dataDir, { recursive: true });
@ -150,9 +162,6 @@ class Database {
await R.exec("PRAGMA cache_size = -12000"); await R.exec("PRAGMA cache_size = -12000");
await R.exec("PRAGMA auto_vacuum = FULL"); await R.exec("PRAGMA auto_vacuum = FULL");
// Avoid error "SQLITE_BUSY: database is locked" by allowing SQLITE to wait up to 5 seconds to do a write
await R.exec("PRAGMA busy_timeout = 5000");
// This ensures that an operating system crash or power failure will not corrupt the database. // This ensures that an operating system crash or power failure will not corrupt the database.
// FULL synchronous is very safe, but it is also slower. // FULL synchronous is very safe, but it is also slower.
// Read more: https://sqlite.org/pragma.html#pragma_synchronous // Read more: https://sqlite.org/pragma.html#pragma_synchronous

@ -75,7 +75,7 @@ class DockerHost {
if (dockerHost.dockerType === "socket") { if (dockerHost.dockerType === "socket") {
options.socketPath = dockerHost.dockerDaemon; options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") { } else if (dockerHost.dockerType === "tcp") {
options.baseURL = dockerHost.dockerDaemon; options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
} }
let res = await axios.request(options); let res = await axios.request(options);
@ -99,6 +99,18 @@ class DockerHost {
} }
} }
/**
* Since axios 0.27.X, it does not accept `tcp://` protocol.
* Change it to `http://` on the fly in order to fix it. (https://github.com/louislam/uptime-kuma/issues/2165)
*/
static patchDockerURL(url) {
if (typeof url === "string") {
// Replace every occurrence of tcp:// with http:// (the regex uses the g flag)
return url.replace(/tcp:\/\//g, "http://");
}
return url;
}
} }
module.exports = { module.exports = {

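A quick illustration (a sketch, not part of the patch) of what patchDockerURL does with the values a docker host row may contain; the require path is assumed.

const { DockerHost } = require("./server/docker"); // path assumed

DockerHost.patchDockerURL("tcp://localhost:2375");  // -> "http://localhost:2375" (a form axios >= 0.27 accepts)
DockerHost.patchDockerURL("http://localhost:2375"); // -> unchanged
DockerHost.patchDockerURL(undefined);               // -> returned as-is (non-strings pass through)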
@ -0,0 +1,24 @@
const childProcess = require("child_process");
class Git {
static clone(repoURL, cwd, targetDir = ".") {
let result = childProcess.spawnSync("git", [
"clone",
repoURL,
targetDir,
], {
cwd: cwd,
});
if (result.status !== 0) {
throw new Error(result.stderr.toString("utf-8"));
} else {
return result.stdout.toString("utf-8") + result.stderr.toString("utf-8");
}
}
}
module.exports = {
Git,
};

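A hedged usage sketch for the new Git.clone helper; the repository URL and directories below are placeholders, not values used anywhere in this patch.

const { Git } = require("./server/git"); // path assumed

try {
    // Runs `git clone <repoURL> example-plugin` with ./data/plugins as the working directory
    const output = Git.clone("https://example.com/plugin-repo.git", "./data/plugins", "example-plugin");
    console.log(output); // combined stdout + stderr on success
} catch (e) {
    console.error("git clone failed:", e.message); // stderr is surfaced when git exits non-zero
}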
@ -32,6 +32,7 @@ const initBackgroundJobs = function (args) {
return bree; return bree;
}; };
/** Stop all background jobs if running */
const stopBackgroundJobs = function () { const stopBackgroundJobs = function () {
if (bree) { if (bree) {
bree.stop(); bree.stop();

@ -25,15 +25,20 @@ const DEFAULT_KEEP_PERIOD = 180;
parsedPeriod = DEFAULT_KEEP_PERIOD; parsedPeriod = DEFAULT_KEEP_PERIOD;
} }
log(`Clearing Data older than ${parsedPeriod} days...`); if (parsedPeriod < 1) {
log(`Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
try { } else {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ", log(`Clearing Data older than ${parsedPeriod} days...`);
[ parsedPeriod ]
); try {
} catch (e) { await R.exec(
log(`Failed to clear old data: ${e.message}`); "DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[ parsedPeriod ]
);
} catch (e) {
log(`Failed to clear old data: ${e.message}`);
}
} }
exit(); exit();

@ -1,8 +1,3 @@
const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const { BeanModel } = require("redbean-node/dist/bean-model"); const { BeanModel } = require("redbean-node/dist/bean-model");
/** /**
@ -10,6 +5,7 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
* 0 = DOWN * 0 = DOWN
* 1 = UP * 1 = UP
* 2 = PENDING * 2 = PENDING
* 3 = MAINTENANCE
*/ */
class Heartbeat extends BeanModel { class Heartbeat extends BeanModel {

@ -0,0 +1,240 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { parseTimeObject, parseTimeFromTimeObject, utcToLocal, localToUTC, log } = require("../../src/util");
const { timeObjectToUTC, timeObjectToLocal } = require("../util-server");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
class Maintenance extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
* @returns {Object}
*/
async toPublicJSON() {
let dateRange = [];
if (this.start_date) {
dateRange.push(utcToLocal(this.start_date));
if (this.end_date) {
dateRange.push(utcToLocal(this.end_date));
}
}
let timeRange = [];
let startTime = timeObjectToLocal(parseTimeObject(this.start_time));
timeRange.push(startTime);
let endTime = timeObjectToLocal(parseTimeObject(this.end_time));
timeRange.push(endTime);
let obj = {
id: this.id,
title: this.title,
description: this.description,
strategy: this.strategy,
intervalDay: this.interval_day,
active: !!this.active,
dateRange: dateRange,
timeRange: timeRange,
weekdays: (this.weekdays) ? JSON.parse(this.weekdays) : [],
daysOfMonth: (this.days_of_month) ? JSON.parse(this.days_of_month) : [],
timeslotList: [],
};
const timeslotList = await this.getTimeslotList();
for (let timeslot of timeslotList) {
obj.timeslotList.push(await timeslot.toPublicJSON());
}
if (!Array.isArray(obj.weekdays)) {
obj.weekdays = [];
}
if (!Array.isArray(obj.daysOfMonth)) {
obj.daysOfMonth = [];
}
// Maintenance Status
if (!obj.active) {
obj.status = "inactive";
} else if (obj.strategy === "manual") {
obj.status = "under-maintenance";
} else if (obj.timeslotList.length > 0) {
let currentTimestamp = dayjs().unix();
for (let timeslot of obj.timeslotList) {
if (dayjs.utc(timeslot.startDate).unix() <= currentTimestamp && dayjs.utc(timeslot.endDate).unix() >= currentTimestamp) {
log.debug("timeslot", "Timeslot ID: " + timeslot.id);
log.debug("timeslot", "currentTimestamp:" + currentTimestamp);
log.debug("timeslot", "timeslot.start_date:" + dayjs.utc(timeslot.startDate).unix());
log.debug("timeslot", "timeslot.end_date:" + dayjs.utc(timeslot.endDate).unix());
obj.status = "under-maintenance";
break;
}
}
if (!obj.status) {
obj.status = "scheduled";
}
} else if (obj.timeslotList.length === 0) {
obj.status = "ended";
} else {
obj.status = "unknown";
}
return obj;
}
/**
* Get only future or current timeslots
* @returns {Promise<[]>}
*/
async getTimeslotList() {
return R.convertToBeans("maintenance_timeslot", await R.getAll(`
SELECT maintenance_timeslot.*
FROM maintenance_timeslot, maintenance
WHERE maintenance_timeslot.maintenance_id = maintenance.id
AND maintenance.id = ?
AND ${Maintenance.getActiveAndFutureMaintenanceSQLCondition()}
`, [
this.id
]));
}
/**
* Return an object that is ready to be parsed to JSON
* @param {string} timezone If not specified, the timeRange will be in UTC
* @returns {Object}
*/
async toJSON(timezone = null) {
return this.toPublicJSON(timezone);
}
/**
* Get a list of weekdays that the maintenance is active for
* Monday=1, Tuesday=2 etc.
* @returns {number[]} Array of active weekdays
*/
getDayOfWeekList() {
log.debug("timeslot", "List: " + this.weekdays);
return JSON.parse(this.weekdays).sort(function (a, b) {
return a - b;
});
}
/**
* Get a list of days in month that maintenance is active for
* @returns {number[]} Array of active days in month
*/
getDayOfMonthList() {
return JSON.parse(this.days_of_month).sort(function (a, b) {
return a - b;
});
}
/**
* Get the start date and time for maintenance
* @returns {dayjs.Dayjs} Start date and time
*/
getStartDateTime() {
let startOfTheDay = dayjs.utc(this.start_date).format("HH:mm");
log.debug("timeslot", "startOfTheDay: " + startOfTheDay);
// Start Time
let startTimeSecond = dayjs.utc(this.start_time, "HH:mm").diff(dayjs.utc(startOfTheDay, "HH:mm"), "second");
log.debug("timeslot", "startTime: " + startTimeSecond);
// Bake StartDate + StartTime = Start DateTime
return dayjs.utc(this.start_date).add(startTimeSecond, "second");
}
/**
* Get the duration of the maintenance in seconds
* @returns {number} Duration of maintenance
*/
getDuration() {
let duration = dayjs.utc(this.end_time, "HH:mm").diff(dayjs.utc(this.start_time, "HH:mm"), "second");
// Add 24 hours if the window crosses midnight
if (duration < 0) {
duration += 24 * 3600;
}
return duration;
}
/**
* Convert data from socket to bean
* @param {Bean} bean Bean to fill in
* @param {Object} obj Data to fill bean with
* @returns {Bean} Filled bean
*/
static jsonToBean(bean, obj) {
if (obj.id) {
bean.id = obj.id;
}
// Apply the timezone offset to timeRange, as it cannot be applied automatically.
if (obj.timeRange[0]) {
timeObjectToUTC(obj.timeRange[0]);
if (obj.timeRange[1]) {
timeObjectToUTC(obj.timeRange[1]);
}
}
bean.title = obj.title;
bean.description = obj.description;
bean.strategy = obj.strategy;
bean.interval_day = obj.intervalDay;
bean.active = obj.active;
if (obj.dateRange[0]) {
bean.start_date = localToUTC(obj.dateRange[0]);
if (obj.dateRange[1]) {
bean.end_date = localToUTC(obj.dateRange[1]);
}
}
bean.start_time = parseTimeFromTimeObject(obj.timeRange[0]);
bean.end_time = parseTimeFromTimeObject(obj.timeRange[1]);
bean.weekdays = JSON.stringify(obj.weekdays);
bean.days_of_month = JSON.stringify(obj.daysOfMonth);
return bean;
}
/**
* SQL conditions for active maintenance
* @returns {string}
*/
static getActiveMaintenanceSQLCondition() {
return `
(
(maintenance_timeslot.start_date <= DATETIME('now')
AND maintenance_timeslot.end_date >= DATETIME('now')
AND maintenance.active = 1)
OR
(maintenance.strategy = 'manual' AND active = 1)
)
`;
}
/**
* SQL conditions for active and future maintenance
* @returns {string}
*/
static getActiveAndFutureMaintenanceSQLCondition() {
return `
(
((maintenance_timeslot.end_date >= DATETIME('now')
AND maintenance.active = 1)
OR
(maintenance.strategy = 'manual' AND active = 1))
)
`;
}
}
module.exports = Maintenance;

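A worked example (sketch only, with illustrative times) of the getDuration() wrap-around above: with start_time "23:00" and end_time "01:00" the raw difference is negative, so 24 hours are added and the maintenance window comes out to 7200 seconds.

const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
const customParseFormat = require("dayjs/plugin/customParseFormat"); // needed so "HH:mm" parsing works in this standalone sketch
dayjs.extend(utc);
dayjs.extend(customParseFormat);

const startTime = "23:00"; // illustrative values
const endTime = "01:00";

let duration = dayjs.utc(endTime, "HH:mm").diff(dayjs.utc(startTime, "HH:mm"), "second"); // -79200
if (duration < 0) {
    duration += 24 * 3600; // the window crosses midnight
}
console.log(duration); // 7200 seconds, i.e. 2 hours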
@ -0,0 +1,198 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
const dayjs = require("dayjs");
const { log, utcToLocal, SQL_DATETIME_FORMAT_WITHOUT_SECOND, localToUTC } = require("../../src/util");
const { UptimeKumaServer } = require("../uptime-kuma-server");
class MaintenanceTimeslot extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
* @returns {Object}
*/
async toPublicJSON() {
const serverTimezoneOffset = UptimeKumaServer.getInstance().getTimezoneOffset();
const obj = {
id: this.id,
startDate: this.start_date,
endDate: this.end_date,
startDateServerTimezone: utcToLocal(this.start_date, SQL_DATETIME_FORMAT_WITHOUT_SECOND),
endDateServerTimezone: utcToLocal(this.end_date, SQL_DATETIME_FORMAT_WITHOUT_SECOND),
serverTimezoneOffset,
};
return obj;
}
/**
* Return an object that is ready to be parsed to JSON
* @returns {Object}
*/
async toJSON() {
return await this.toPublicJSON();
}
/**
* @param {Maintenance} maintenance
* @param {dayjs} minDate (For recurring types only) Generate the next timeslot from this date.
* @param {boolean} removeExist Remove existing timeslots before creating new ones
* @returns {Promise<MaintenanceTimeslot>}
*/
static async generateTimeslot(maintenance, minDate = null, removeExist = false) {
if (removeExist) {
await R.exec("DELETE FROM maintenance_timeslot WHERE maintenance_id = ? ", [
maintenance.id
]);
}
if (maintenance.strategy === "manual") {
log.debug("maintenance", "No need to generate timeslot for manual type");
} else if (maintenance.strategy === "single") {
let bean = R.dispense("maintenance_timeslot");
bean.maintenance_id = maintenance.id;
bean.start_date = maintenance.start_date;
bean.end_date = maintenance.end_date;
bean.generated_next = true;
return await R.store(bean);
} else if (maintenance.strategy === "recurring-interval") {
// Prevent an infinite loop in case interval_day is not set
if (!maintenance.interval_day || maintenance.interval_day <= 0) {
maintenance.interval_day = 1;
}
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
return startDateTime.add(maintenance.interval_day, "day");
}, () => {
return true;
});
} else if (maintenance.strategy === "recurring-weekday") {
let dayOfWeekList = maintenance.getDayOfWeekList();
log.debug("timeslot", dayOfWeekList);
if (dayOfWeekList.length <= 0) {
log.debug("timeslot", "No weekdays selected?");
return null;
}
const isValid = (startDateTime) => {
log.debug("timeslot", "nextDateTime: " + startDateTime);
let day = startDateTime.local().day();
log.debug("timeslot", "nextDateTime.day(): " + day);
return dayOfWeekList.includes(day);
};
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
while (true) {
startDateTime = startDateTime.add(1, "day");
if (isValid(startDateTime)) {
return startDateTime;
}
}
}, isValid);
} else if (maintenance.strategy === "recurring-day-of-month") {
let dayOfMonthList = maintenance.getDayOfMonthList();
if (dayOfMonthList.length <= 0) {
log.debug("timeslot", "No day selected?");
return null;
}
const isValid = (startDateTime) => {
let day = parseInt(startDateTime.local().format("D"));
log.debug("timeslot", "day: " + day);
// Check 1-31
if (dayOfMonthList.includes(day)) {
return startDateTime;
}
// Check "lastDay1","lastDay2"...
let daysInMonth = startDateTime.daysInMonth();
let lastDayList = [];
// Smallest first, e.g. 28, 29, 30, 31
for (let i = 4; i >= 1; i--) {
if (dayOfMonthList.includes("lastDay" + i)) {
lastDayList.push(daysInMonth - i + 1);
}
}
log.debug("timeslot", lastDayList);
return lastDayList.includes(day);
};
return await this.handleRecurringType(maintenance, minDate, (startDateTime) => {
while (true) {
startDateTime = startDateTime.add(1, "day");
if (isValid(startDateTime)) {
return startDateTime;
}
}
}, isValid);
} else {
throw new Error("Unknown maintenance strategy");
}
}
/**
* Generate the next timeslot for all recurring types
* @param maintenance
* @param minDate
* @param {function} nextDayCallback The logic for getting the next possible day
* @param {function} isValidCallback Check whether the day matches the current strategy
* @returns {Promise<null|MaintenanceTimeslot>}
*/
static async handleRecurringType(maintenance, minDate, nextDayCallback, isValidCallback) {
let bean = R.dispense("maintenance_timeslot");
let duration = maintenance.getDuration();
let startDateTime = maintenance.getStartDateTime();
let endDateTime;
// Keep generating from the first possible date until a valid timeslot is found
while (true) {
log.debug("timeslot", "startDateTime: " + startDateTime.format());
// Handling out of effective date range
if (startDateTime.diff(dayjs.utc(maintenance.end_date)) > 0) {
log.debug("timeslot", "Out of effective date range");
return null;
}
endDateTime = startDateTime.add(duration, "second");
// If endDateTime is out of effective date range, use the end datetime from effective date range
if (endDateTime.diff(dayjs.utc(maintenance.end_date)) > 0) {
endDateTime = dayjs.utc(maintenance.end_date);
}
// If minDate is set, the endDateTime must be bigger than it.
// And the endDateTime must be bigger than the current time
// Is valid under current recurring strategy
if (
(!minDate || endDateTime.diff(minDate) > 0) &&
endDateTime.diff(dayjs()) > 0 &&
isValidCallback(startDateTime)
) {
break;
}
startDateTime = nextDayCallback(startDateTime);
}
bean.maintenance_id = maintenance.id;
bean.start_date = localToUTC(startDateTime);
bean.end_date = localToUTC(endDateTime);
bean.generated_next = false;
return await R.store(bean);
}
}
module.exports = MaintenanceTimeslot;

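To make the recurring-weekday walk above concrete, a self-contained sketch with placeholder values: keep adding one day until the candidate date lands on a selected weekday.

const dayjs = require("dayjs");

const dayOfWeekList = [ 1, 3, 5 ]; // Mon, Wed, Fri – illustrative selection
const isValid = (d) => dayOfWeekList.includes(d.day()); // dayjs: 0 = Sunday … 6 = Saturday

let candidate = dayjs("2022-11-20"); // placeholder start date (a Sunday)
while (!isValid(candidate)) {
    candidate = candidate.add(1, "day");
}
console.log(candidate.format("YYYY-MM-DD dddd")); // 2022-11-21 Monday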
@ -1,13 +1,11 @@
const https = require("https"); const https = require("https");
const dayjs = require("dayjs"); const dayjs = require("dayjs");
const utc = require("dayjs/plugin/utc");
let timezone = require("dayjs/plugin/timezone");
dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios"); const axios = require("axios");
const { Prometheus } = require("../prometheus"); const { Prometheus } = require("../prometheus");
const { log, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util"); const { log, UP, DOWN, PENDING, MAINTENANCE, flipStatus, TimeLogger, MAX_INTERVAL_SECOND, MIN_INTERVAL_SECOND } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mqttAsync, setSetting, httpNtlm, radius } = require("../util-server"); const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mysqlQuery, mqttAsync, setSetting, httpNtlm, radius, grpcQuery,
redisPingAsync, mongodbPing,
} = require("../util-server");
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model"); const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification"); const { Notification } = require("../notification");
@ -17,12 +15,17 @@ const version = require("../../package.json").version;
const apicache = require("../modules/apicache"); const apicache = require("../modules/apicache");
const { UptimeKumaServer } = require("../uptime-kuma-server"); const { UptimeKumaServer } = require("../uptime-kuma-server");
const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent"); const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
const { DockerHost } = require("../docker");
const Maintenance = require("./maintenance");
const { UptimeCacheList } = require("../uptime-cache-list");
const Gamedig = require("gamedig");
/** /**
* status: * status:
* 0 = DOWN * 0 = DOWN
* 1 = UP * 1 = UP
* 2 = PENDING * 2 = PENDING
* 3 = MAINTENANCE
*/ */
class Monitor extends BeanModel { class Monitor extends BeanModel {
@ -84,31 +87,30 @@ class Monitor extends BeanModel {
expiryNotification: this.isEnabledExpiryNotification(), expiryNotification: this.isEnabledExpiryNotification(),
ignoreTls: this.getIgnoreTls(), ignoreTls: this.getIgnoreTls(),
upsideDown: this.isUpsideDown(), upsideDown: this.isUpsideDown(),
packetSize: this.packetSize,
maxredirects: this.maxredirects, maxredirects: this.maxredirects,
accepted_statuscodes: this.getAcceptedStatuscodes(), accepted_statuscodes: this.getAcceptedStatuscodes(),
dns_resolve_type: this.dns_resolve_type, dns_resolve_type: this.dns_resolve_type,
dns_resolve_server: this.dns_resolve_server, dns_resolve_server: this.dns_resolve_server,
dns_last_result: this.dns_last_result, dns_last_result: this.dns_last_result,
pushToken: this.pushToken,
docker_container: this.docker_container, docker_container: this.docker_container,
docker_host: this.docker_host, docker_host: this.docker_host,
proxyId: this.proxy_id, proxyId: this.proxy_id,
notificationIDList, notificationIDList,
tags: tags, tags: tags,
mqttUsername: this.mqttUsername, maintenance: await Monitor.isUnderMaintenance(this.id),
mqttPassword: this.mqttPassword,
mqttTopic: this.mqttTopic, mqttTopic: this.mqttTopic,
mqttSuccessMessage: this.mqttSuccessMessage, mqttSuccessMessage: this.mqttSuccessMessage,
databaseConnectionString: this.databaseConnectionString,
databaseQuery: this.databaseQuery, databaseQuery: this.databaseQuery,
authMethod: this.authMethod, authMethod: this.authMethod,
authWorkstation: this.authWorkstation, grpcUrl: this.grpcUrl,
authDomain: this.authDomain, grpcProtobuf: this.grpcProtobuf,
radiusUsername: this.radiusUsername, grpcMethod: this.grpcMethod,
radiusPassword: this.radiusPassword, grpcServiceName: this.grpcServiceName,
grpcEnableTls: this.getGrpcEnableTls(),
radiusCalledStationId: this.radiusCalledStationId, radiusCalledStationId: this.radiusCalledStationId,
radiusCallingStationId: this.radiusCallingStationId, radiusCallingStationId: this.radiusCallingStationId,
radiusSecret: this.radiusSecret, game: this.game,
}; };
if (includeSensitiveData) { if (includeSensitiveData) {
@ -116,12 +118,23 @@ class Monitor extends BeanModel {
...data, ...data,
headers: this.headers, headers: this.headers,
body: this.body, body: this.body,
grpcBody: this.grpcBody,
grpcMetadata: this.grpcMetadata,
basic_auth_user: this.basic_auth_user, basic_auth_user: this.basic_auth_user,
basic_auth_pass: this.basic_auth_pass, basic_auth_pass: this.basic_auth_pass,
pushToken: this.pushToken, pushToken: this.pushToken,
databaseConnectionString: this.databaseConnectionString,
radiusUsername: this.radiusUsername,
radiusPassword: this.radiusPassword,
radiusSecret: this.radiusSecret,
mqttUsername: this.mqttUsername,
mqttPassword: this.mqttPassword,
authWorkstation: this.authWorkstation,
authDomain: this.authDomain,
}; };
} }
data.includeSensitiveData = includeSensitiveData;
return data; return data;
} }
@ -166,6 +179,14 @@ class Monitor extends BeanModel {
return Boolean(this.upsideDown); return Boolean(this.upsideDown);
} }
/**
* Parse to boolean
* @returns {boolean}
*/
getGrpcEnableTls() {
return Boolean(this.grpcEnableTls);
}
/** /**
* Get accepted status codes * Get accepted status codes
* @returns {Object} * @returns {Object}
@ -229,7 +250,10 @@ class Monitor extends BeanModel {
} }
try { try {
if (this.type === "http" || this.type === "keyword") { if (await Monitor.isUnderMaintenance(this.id)) {
bean.msg = "Monitor under maintenance";
bean.status = MAINTENANCE;
} else if (this.type === "http" || this.type === "keyword") {
// Do not do any queries/high loading things before the "bean.ping" // Do not do any queries/high loading things before the "bean.ping"
let startTime = dayjs().valueOf(); let startTime = dayjs().valueOf();
@ -248,6 +272,7 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] Prepare Options for axios`); log.debug("monitor", `[${this.name}] Prepare Options for axios`);
// Axios Options
const options = { const options = {
url: this.url, url: this.url,
method: (this.method || "get").toLowerCase(), method: (this.method || "get").toLowerCase(),
@ -286,20 +311,8 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`); log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
log.debug("monitor", `[${this.name}] Axios Request`); log.debug("monitor", `[${this.name}] Axios Request`);
let res; // Make Request
if (this.auth_method === "ntlm") { let res = await this.makeAxiosRequest(options);
options.httpsAgent.keepAlive = true;
res = await httpNtlm(options, {
username: this.basic_auth_user,
password: this.basic_auth_pass,
domain: this.authDomain,
workstation: this.authWorkstation ? this.authWorkstation : undefined
});
} else {
res = await axios.request(options);
}
bean.msg = `${res.status} - ${res.statusText}`; bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime; bean.ping = dayjs().valueOf() - startTime;
@ -363,7 +376,7 @@ class Monitor extends BeanModel {
bean.status = UP; bean.status = UP;
} else if (this.type === "ping") { } else if (this.type === "ping") {
bean.ping = await ping(this.hostname); bean.ping = await ping(this.hostname, this.packetSize);
bean.msg = ""; bean.msg = "";
bean.status = UP; bean.status = UP;
} else if (this.type === "dns") { } else if (this.type === "dns") {
@ -473,39 +486,60 @@ class Monitor extends BeanModel {
bean.msg = res.data.response.servers[0].name; bean.msg = res.data.response.servers[0].name;
try { try {
bean.ping = await ping(this.hostname); bean.ping = await ping(this.hostname, this.packetSize);
} catch (_) { } } catch (_) { }
} else { } else {
throw new Error("Server not found on Steam"); throw new Error("Server not found on Steam");
} }
} else if (this.type === "gamedig") {
try {
const state = await Gamedig.query({
type: this.game,
host: this.hostname,
port: this.port,
givenPortOnly: true,
});
bean.msg = state.name;
bean.status = UP;
bean.ping = state.ping;
} catch (e) {
throw new Error(e.message);
}
} else if (this.type === "docker") { } else if (this.type === "docker") {
log.debug(`[${this.name}] Prepare Options for Axios`); log.debug("monitor", `[${this.name}] Prepare Options for Axios`);
const dockerHost = await R.load("docker_host", this.docker_host); const dockerHost = await R.load("docker_host", this.docker_host);
const options = { const options = {
url: `/containers/${this.docker_container}/json`, url: `/containers/${this.docker_container}/json`,
timeout: this.interval * 1000 * 0.8,
headers: { headers: {
"Accept": "*/*", "Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version, "User-Agent": "Uptime-Kuma/" + version,
}, },
httpsAgent: new https.Agent({ httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940) maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(), rejectUnauthorized: !this.getIgnoreTls(),
}),
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
maxCachedSessions: 0,
}), }),
}; };
if (dockerHost._dockerType === "socket") { if (dockerHost._dockerType === "socket") {
options.socketPath = dockerHost._dockerDaemon; options.socketPath = dockerHost._dockerDaemon;
} else if (dockerHost._dockerType === "tcp") { } else if (dockerHost._dockerType === "tcp") {
options.baseURL = dockerHost._dockerDaemon; options.baseURL = DockerHost.patchDockerURL(dockerHost._dockerDaemon);
} }
log.debug(`[${this.name}] Axios Request`); log.debug("monitor", `[${this.name}] Axios Request`);
let res = await axios.request(options); let res = await axios.request(options);
if (res.data.State.Running) { if (res.data.State.Running) {
bean.status = UP; bean.status = UP;
bean.msg = ""; bean.msg = res.data.State.Status;
} else {
throw Error("Container State is " + res.data.State.Status);
} }
} else if (this.type === "mqtt") { } else if (this.type === "mqtt") {
bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, { bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, {
@ -523,6 +557,37 @@ class Monitor extends BeanModel {
bean.msg = ""; bean.msg = "";
bean.status = UP; bean.status = UP;
bean.ping = dayjs().valueOf() - startTime; bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "grpc-keyword") {
let startTime = dayjs().valueOf();
const options = {
grpcUrl: this.grpcUrl,
grpcProtobufData: this.grpcProtobuf,
grpcServiceName: this.grpcServiceName,
grpcEnableTls: this.grpcEnableTls,
grpcMethod: this.grpcMethod,
grpcBody: this.grpcBody,
keyword: this.keyword
};
const response = await grpcQuery(options);
bean.ping = dayjs().valueOf() - startTime;
log.debug("monitor:", `gRPC response: ${JSON.stringify(response)}`);
let responseData = response.data;
if (responseData.length > 50) {
responseData = responseData.toString().substring(0, 47) + "...";
}
if (response.code !== 1) {
bean.status = DOWN;
bean.msg = `Error sending gRPC request: ${response.code} ${response.errorMessage}`;
} else {
if (response.data.toString().includes(this.keyword)) {
bean.status = UP;
bean.msg = `${responseData}, keyword [${this.keyword}] is found`;
} else {
log.debug("monitor:", `gRPC response [${responseData}] does not contain keyword [${this.keyword}]`);
bean.status = DOWN;
bean.msg = `keyword [${this.keyword}] is not in [${responseData}]`;
}
}
} else if (this.type === "postgres") { } else if (this.type === "postgres") {
let startTime = dayjs().valueOf(); let startTime = dayjs().valueOf();
@ -531,8 +596,36 @@ class Monitor extends BeanModel {
bean.msg = ""; bean.msg = "";
bean.status = UP; bean.status = UP;
bean.ping = dayjs().valueOf() - startTime; bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "mysql") {
let startTime = dayjs().valueOf();
await mysqlQuery(this.databaseConnectionString, this.databaseQuery);
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "mongodb") {
let startTime = dayjs().valueOf();
await mongodbPing(this.databaseConnectionString);
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "radius") { } else if (this.type === "radius") {
let startTime = dayjs().valueOf(); let startTime = dayjs().valueOf();
// Handle monitors that were created before the
// update and as such don't have a value for
// this.port.
let port;
if (this.port == null) {
port = 1812;
} else {
port = this.port;
}
try { try {
const resp = await radius( const resp = await radius(
this.hostname, this.hostname,
@ -540,7 +633,8 @@ class Monitor extends BeanModel {
this.radiusPassword, this.radiusPassword,
this.radiusCalledStationId, this.radiusCalledStationId,
this.radiusCallingStationId, this.radiusCallingStationId,
this.radiusSecret this.radiusSecret,
port
); );
if (resp.code) { if (resp.code) {
bean.msg = resp.code; bean.msg = resp.code;
@ -555,9 +649,23 @@ class Monitor extends BeanModel {
} }
} }
bean.ping = dayjs().valueOf() - startTime; bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "redis") {
let startTime = dayjs().valueOf();
bean.msg = await redisPingAsync(this.databaseConnectionString);
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type in UptimeKumaServer.monitorTypeList) {
let startTime = dayjs().valueOf();
const monitorType = UptimeKumaServer.monitorTypeList[this.type];
await monitorType.check(this, bean);
if (!bean.ping) {
bean.ping = dayjs().valueOf() - startTime;
}
} else { } else {
bean.msg = "Unknown Monitor Type"; throw new Error("Unknown Monitor Type");
bean.status = PENDING;
} }
if (this.isUpsideDown()) { if (this.isUpsideDown()) {
@ -593,8 +701,12 @@ class Monitor extends BeanModel {
if (isImportant) { if (isImportant) {
bean.important = true; bean.important = true;
log.debug("monitor", `[${this.name}] sendNotification`); if (Monitor.isImportantForNotification(isFirstBeat, previousBeat?.status, bean.status)) {
await Monitor.sendNotification(isFirstBeat, this, bean); log.debug("monitor", `[${this.name}] sendNotification`);
await Monitor.sendNotification(isFirstBeat, this, bean);
} else {
log.debug("monitor", `[${this.name}] will not sendNotification because it is (or was) under maintenance`);
}
// Reset down count // Reset down count
bean.downCount = 0; bean.downCount = 0;
@ -603,6 +715,8 @@ class Monitor extends BeanModel {
log.debug("monitor", `[${this.name}] apicache clear`); log.debug("monitor", `[${this.name}] apicache clear`);
apicache.clear(); apicache.clear();
UptimeKumaServer.getInstance().sendMaintenanceListByUserID(this.user_id);
} else { } else {
bean.important = false; bean.important = false;
@ -626,11 +740,14 @@ class Monitor extends BeanModel {
beatInterval = this.retryInterval; beatInterval = this.retryInterval;
} }
log.warn("monitor", `Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`); log.warn("monitor", `Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === MAINTENANCE) {
log.warn("monitor", `Monitor #${this.id} '${this.name}': Under Maintenance | Type: ${this.type}`);
} else { } else {
log.warn("monitor", `Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type} | Down Count: ${bean.downCount} | Resend Interval: ${this.resendInterval}`); log.warn("monitor", `Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type} | Down Count: ${bean.downCount} | Resend Interval: ${this.resendInterval}`);
} }
log.debug("monitor", `[${this.name}] Send to socket`); log.debug("monitor", `[${this.name}] Send to socket`);
UptimeCacheList.clearCache(this.id);
io.to(this.user_id).emit("heartbeat", bean.toJSON()); io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id); Monitor.sendStats(io, this.id, this.user_id);
@ -677,6 +794,47 @@ class Monitor extends BeanModel {
} }
} }
/**
* Make a request using axios
* @param {Object} options Options for Axios
* @param {boolean} finalCall Should this be the final call i.e
* don't retry on failure
* @returns {Object} Axios response
*/
async makeAxiosRequest(options, finalCall = false) {
try {
let res;
if (this.auth_method === "ntlm") {
options.httpsAgent.keepAlive = true;
res = await httpNtlm(options, {
username: this.basic_auth_user,
password: this.basic_auth_pass,
domain: this.authDomain,
workstation: this.authWorkstation ? this.authWorkstation : undefined
});
} else {
res = await axios.request(options);
}
return res;
} catch (e) {
// Fix #2253
// Read more: https://stackoverflow.com/questions/1759956/curl-error-18-transfer-closed-with-outstanding-read-data-remaining
if (!finalCall && typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
log.debug("monitor", "makeAxiosRequest with gzip");
options.headers["Accept-Encoding"] = "gzip, deflate";
return this.makeAxiosRequest(options, true);
} else {
if (typeof e.message === "string" && e.message.includes("maxContentLength size of -1 exceeded")) {
e.message = "response timeout: incomplete response within an interval";
}
throw e;
}
}
}
/** Stop monitor */ /** Stop monitor */
stop() { stop() {
clearTimeout(this.heartbeatInterval); clearTimeout(this.heartbeatInterval);
@ -815,7 +973,15 @@ class Monitor extends BeanModel {
* @param {number} duration Hours * @param {number} duration Hours
* @param {number} monitorID ID of monitor to calculate * @param {number} monitorID ID of monitor to calculate
*/ */
static async calcUptime(duration, monitorID) { static async calcUptime(duration, monitorID, forceNoCache = false) {
if (!forceNoCache) {
let cachedUptime = UptimeCacheList.getUptime(monitorID, duration);
if (cachedUptime != null) {
return cachedUptime;
}
}
const timeLogger = new TimeLogger(); const timeLogger = new TimeLogger();
const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour")); const startTime = R.isoDateTime(dayjs.utc().subtract(duration, "hour"));
@ -836,7 +1002,7 @@ class Monitor extends BeanModel {
-- SUM all uptime duration, also trim off the beat out of time window -- SUM all uptime duration, also trim off the beat out of time window
SUM( SUM(
CASE CASE
WHEN (status = 1) WHEN (status = 1 OR status = 3)
THEN THEN
CASE CASE
WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration WHEN (JULIANDAY(\`time\`) - JULIANDAY(?)) * 86400 < duration
@ -874,6 +1040,9 @@ class Monitor extends BeanModel {
} }
} }
// Cache
UptimeCacheList.addUptime(monitorID, duration, uptime);
return uptime; return uptime;
} }
@ -907,11 +1076,49 @@ class Monitor extends BeanModel {
// DOWN -> PENDING = this case not exists // DOWN -> PENDING = this case not exists
// DOWN -> DOWN = not important // DOWN -> DOWN = not important
// * DOWN -> UP = important // * DOWN -> UP = important
let isImportant = isFirstBeat || // MAINTENANCE -> MAINTENANCE = not important
// * MAINTENANCE -> UP = important
// * MAINTENANCE -> DOWN = important
// * DOWN -> MAINTENANCE = important
// * UP -> MAINTENANCE = important
return isFirstBeat ||
(previousBeatStatus === DOWN && currentBeatStatus === MAINTENANCE) ||
(previousBeatStatus === UP && currentBeatStatus === MAINTENANCE) ||
(previousBeatStatus === MAINTENANCE && currentBeatStatus === DOWN) ||
(previousBeatStatus === MAINTENANCE && currentBeatStatus === UP) ||
(previousBeatStatus === UP && currentBeatStatus === DOWN) ||
(previousBeatStatus === DOWN && currentBeatStatus === UP) ||
(previousBeatStatus === PENDING && currentBeatStatus === DOWN);
}
/**
* Is this beat important for notifications?
* @param {boolean} isFirstBeat Is this the first beat of this monitor?
* @param {number} previousBeatStatus Status of the previous beat
* @param {number} currentBeatStatus Status of the current beat
* @returns {boolean} True if is an important beat else false
*/
static isImportantForNotification(isFirstBeat, previousBeatStatus, currentBeatStatus) {
// * ? -> ANY STATUS = important [isFirstBeat]
// UP -> PENDING = not important
// * UP -> DOWN = important
// UP -> UP = not important
// PENDING -> PENDING = not important
// * PENDING -> DOWN = important
// PENDING -> UP = not important
// DOWN -> PENDING = this case not exists
// DOWN -> DOWN = not important
// * DOWN -> UP = important
// MAINTENANCE -> MAINTENANCE = not important
// MAINTENANCE -> UP = not important
// * MAINTENANCE -> DOWN = important
// DOWN -> MAINTENANCE = not important
// UP -> MAINTENANCE = not important
return isFirstBeat ||
(previousBeatStatus === MAINTENANCE && currentBeatStatus === DOWN) ||
(previousBeatStatus === UP && currentBeatStatus === DOWN) || (previousBeatStatus === UP && currentBeatStatus === DOWN) ||
(previousBeatStatus === DOWN && currentBeatStatus === UP) || (previousBeatStatus === DOWN && currentBeatStatus === UP) ||
(previousBeatStatus === PENDING && currentBeatStatus === DOWN); (previousBeatStatus === PENDING && currentBeatStatus === DOWN);
return isImportant;
} }
/** /**
@ -935,7 +1142,13 @@ class Monitor extends BeanModel {
for (let notification of notificationList) { for (let notification of notificationList) {
try { try {
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), bean.toJSON()); // If msg is undefined, notifications such as Discord cannot send it out, so provide a fallback.
const heartbeatJSON = bean.toJSON();
if (!heartbeatJSON["msg"]) {
heartbeatJSON["msg"] = "N/A";
}
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), heartbeatJSON);
} catch (e) { } catch (e) {
log.error("monitor", "Cannot send notification to " + notification.name); log.error("monitor", "Cannot send notification to " + notification.name);
log.error("monitor", e); log.error("monitor", e);
@ -1048,6 +1261,36 @@ class Monitor extends BeanModel {
monitorID monitorID
]); ]);
} }
/**
* Check if monitor is under maintenance
* @param {number} monitorID ID of monitor to check
* @returns {Promise<boolean>}
*/
static async isUnderMaintenance(monitorID) {
let activeCondition = Maintenance.getActiveMaintenanceSQLCondition();
const maintenance = await R.getRow(`
SELECT COUNT(*) AS count
FROM monitor_maintenance mm
JOIN maintenance
ON mm.maintenance_id = maintenance.id
AND mm.monitor_id = ?
LEFT JOIN maintenance_timeslot
ON maintenance_timeslot.maintenance_id = maintenance.id
WHERE ${activeCondition}
LIMIT 1`, [ monitorID ]);
return maintenance.count !== 0;
}
/** Make sure monitor interval is between bounds */
validate() {
if (this.interval > MAX_INTERVAL_SECOND) {
throw new Error(`Interval cannot be more than ${MAX_INTERVAL_SECOND} seconds`);
}
if (this.interval < MIN_INTERVAL_SECOND) {
throw new Error(`Interval cannot be less than ${MIN_INTERVAL_SECOND} seconds`);
}
}
} }
module.exports = Monitor; module.exports = Monitor;

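A short sketch of how the two transition tables above differ for maintenance, using the status constants imported at the top of monitor.js; the require paths are assumptions.

const { UP, DOWN, MAINTENANCE } = require("../src/util"); // same constants monitor.js imports
const Monitor = require("./server/model/monitor");        // path assumed

// Entering or leaving maintenance is an "important beat" (stored and pushed to the UI),
// but only MAINTENANCE -> DOWN is important enough to notify:
Monitor.isImportantForNotification(false, MAINTENANCE, UP);   // false – recovery from maintenance is silent
Monitor.isImportantForNotification(false, MAINTENANCE, DOWN); // true  – going down during maintenance notifies
Monitor.isImportantForNotification(false, UP, MAINTENANCE);   // false – entering maintenance is silent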
@ -2,6 +2,8 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node"); const { R } = require("redbean-node");
const cheerio = require("cheerio"); const cheerio = require("cheerio");
const { UptimeKumaServer } = require("../uptime-kuma-server"); const { UptimeKumaServer } = require("../uptime-kuma-server");
const jsesc = require("jsesc");
const Maintenance = require("./maintenance");
class StatusPage extends BeanModel { class StatusPage extends BeanModel {
@ -36,7 +38,7 @@ class StatusPage extends BeanModel {
*/ */
static async renderHTML(indexHTML, statusPage) { static async renderHTML(indexHTML, statusPage) {
const $ = cheerio.load(indexHTML); const $ = cheerio.load(indexHTML);
const description155 = statusPage.description?.substring(0, 155); const description155 = statusPage.description?.substring(0, 155) ?? "";
$("title").text(statusPage.title); $("title").text(statusPage.title);
$("meta[name=description]").attr("content", description155); $("meta[name=description]").attr("content", description155);
@ -56,13 +58,19 @@ class StatusPage extends BeanModel {
head.append(`<meta property="og:description" content="${description155}" />`); head.append(`<meta property="og:description" content="${description155}" />`);
// Preload data // Preload data
const json = JSON.stringify(await StatusPage.getStatusPageData(statusPage)); // Add jsesc, fix https://github.com/louislam/uptime-kuma/issues/2186
head.append(` const escapedJSONObject = jsesc(await StatusPage.getStatusPageData(statusPage), {
<script> "isScriptContext": true
window.preloadData = ${json} });
const script = $(`
<script id="preload-data" data-json="{}">
window.preloadData = ${escapedJSONObject};
</script> </script>
`); `);
head.append(script);
// manifest.json // manifest.json
$("link[rel=manifest]").attr("href", `/api/status-page/${statusPage.slug}/manifest.json`); $("link[rel=manifest]").attr("href", `/api/status-page/${statusPage.slug}/manifest.json`);
@ -83,6 +91,8 @@ class StatusPage extends BeanModel {
incident = incident.toPublicJSON(); incident = incident.toPublicJSON();
} }
let maintenanceList = await StatusPage.getMaintenanceList(statusPage.id);
// Public Group List // Public Group List
const publicGroupList = []; const publicGroupList = [];
const showTags = !!statusPage.show_tags; const showTags = !!statusPage.show_tags;
@ -100,7 +110,8 @@ class StatusPage extends BeanModel {
return { return {
config: await statusPage.toPublicJSON(), config: await statusPage.toPublicJSON(),
incident, incident,
publicGroupList publicGroupList,
maintenanceList,
}; };
} }
@ -259,6 +270,38 @@ class StatusPage extends BeanModel {
} }
} }
/**
* Get list of maintenances
* @param {number} statusPageId ID of status page to get maintenance for
* @returns {Promise<Array>} Array of maintenances sanitized for the public
*/
static async getMaintenanceList(statusPageId) {
try {
const publicMaintenanceList = [];
let activeCondition = Maintenance.getActiveMaintenanceSQLCondition();
let maintenanceBeanList = R.convertToBeans("maintenance", await R.getAll(`
SELECT DISTINCT maintenance.*
FROM maintenance
JOIN maintenance_status_page
ON maintenance_status_page.maintenance_id = maintenance.id
AND maintenance_status_page.status_page_id = ?
LEFT JOIN maintenance_timeslot
ON maintenance_timeslot.maintenance_id = maintenance.id
WHERE ${activeCondition}
ORDER BY maintenance.end_date
`, [ statusPageId ]));
for (const bean of maintenanceBeanList) {
publicMaintenanceList.push(await bean.toPublicJSON());
}
return publicMaintenanceList;
} catch (error) {
return [];
}
}
} }
module.exports = StatusPage; module.exports = StatusPage;

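Why the switch to jsesc matters (an illustrative sketch, not code from the patch): a plain JSON.stringify of user-controlled data can contain a literal </script> that terminates the inline script on the status page, while jsesc with isScriptContext escapes it.

const jsesc = require("jsesc");

const preloadData = { description: "hello </script><script>alert(1)</script>" }; // hostile input, illustrative

const unsafe = JSON.stringify(preloadData);                 // still contains "</script>" – would break the inline <script>
const safe = jsesc(preloadData, { isScriptContext: true }); // "</script>" is escaped as "<\/script>"

console.log(unsafe.includes("</script>")); // true
console.log(safe.includes("</script>"));   // false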
@ -0,0 +1,20 @@
import { PluginFunc, ConfigType } from 'dayjs'
declare const plugin: PluginFunc
export = plugin
declare module 'dayjs' {
interface Dayjs {
tz(timezone?: string, keepLocalTime?: boolean): Dayjs
offsetName(type?: 'short' | 'long'): string | undefined
}
interface DayjsTimezone {
(date: ConfigType, timezone?: string): Dayjs
(date: ConfigType, format: string, timezone?: string): Dayjs
guess(): string
setDefault(timezone?: string): void
}
const tz: DayjsTimezone
}

@ -0,0 +1,115 @@
/**
* Copy from node_modules/dayjs/plugin/timezone.js
* Try to fix https://github.com/louislam/uptime-kuma/issues/2318
* Source: https://github.com/iamkun/dayjs/tree/dev/src/plugin/utc
* License: MIT
*/
!function (t, e) {
// eslint-disable-next-line no-undef
typeof exports == "object" && typeof module != "undefined" ? module.exports = e() : typeof define == "function" && define.amd ? define(e) : (t = typeof globalThis != "undefined" ? globalThis : t || self).dayjs_plugin_timezone = e();
}(this, (function () {
"use strict";
let t = {
year: 0,
month: 1,
day: 2,
hour: 3,
minute: 4,
second: 5
};
let e = {};
return function (n, i, o) {
let r;
let a = function (t, n, i) {
void 0 === i && (i = {});
let o = new Date(t);
let r = function (t, n) {
void 0 === n && (n = {});
let i = n.timeZoneName || "short";
let o = t + "|" + i;
let r = e[o];
return r || (r = new Intl.DateTimeFormat("en-US", {
hour12: !1,
timeZone: t,
year: "numeric",
month: "2-digit",
day: "2-digit",
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
timeZoneName: i
}), e[o] = r), r;
}(n, i);
return r.formatToParts(o);
};
let u = function (e, n) {
let i = a(e, n);
let r = [];
let u = 0;
for (; u < i.length; u += 1) {
let f = i[u];
let s = f.type;
let m = f.value;
let c = t[s];
c >= 0 && (r[c] = parseInt(m, 10));
}
let d = r[3];
let l = d === 24 ? 0 : d;
let v = r[0] + "-" + r[1] + "-" + r[2] + " " + l + ":" + r[4] + ":" + r[5] + ":000";
let h = +e;
return (o.utc(v).valueOf() - (h -= h % 1e3)) / 6e4;
};
let f = i.prototype;
f.tz = function (t, e) {
void 0 === t && (t = r);
let n = this.utcOffset();
let i = this.toDate();
let a = i.toLocaleString("en-US", { timeZone: t }).replace("\u202f", " ");
let u = Math.round((i - new Date(a)) / 1e3 / 60);
let f = o(a).$set("millisecond", this.$ms).utcOffset(15 * -Math.round(i.getTimezoneOffset() / 15) - u, !0);
if (e) {
let s = f.utcOffset();
f = f.add(n - s, "minute");
}
return f.$x.$timezone = t, f;
}, f.offsetName = function (t) {
let e = this.$x.$timezone || o.tz.guess();
let n = a(this.valueOf(), e, { timeZoneName: t }).find((function (t) {
return t.type.toLowerCase() === "timezonename";
}));
return n && n.value;
};
let s = f.startOf;
f.startOf = function (t, e) {
if (!this.$x || !this.$x.$timezone) {
return s.call(this, t, e);
}
let n = o(this.format("YYYY-MM-DD HH:mm:ss:SSS"));
return s.call(n, t, e).tz(this.$x.$timezone, !0);
}, o.tz = function (t, e, n) {
let i = n && e;
let a = n || e || r;
let f = u(+o(), a);
if (typeof t != "string") {
return o(t).tz(a);
}
let s = function (t, e, n) {
let i = t - 60 * e * 1e3;
let o = u(i, n);
if (e === o) {
return [ i, e ];
}
let r = u(i -= 60 * (o - e) * 1e3, n);
return o === r ? [ i, o ] : [ t - 60 * Math.min(o, r) * 1e3, Math.max(o, r) ];
}(o.utc(t, i).valueOf(), f, a);
let m = s[0];
let c = s[1];
let d = o(m).utcOffset(c);
return d.$x.$timezone = a, d;
}, o.tz.guess = function () {
return Intl.DateTimeFormat().resolvedOptions().timeZone;
}, o.tz.setDefault = function (t) {
r = t;
};
};
}));

@ -0,0 +1,19 @@
class MonitorType {
name = undefined;
/**
* Run the check for the given monitor and write the result into the heartbeat bean
* @param {Monitor} monitor
* @param {Heartbeat} heartbeat
* @returns {Promise<void>}
*/
async check(monitor, heartbeat) {
throw new Error("You need to override check()");
}
}
module.exports = {
MonitorType,
};

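A sketch of how a custom monitor type could subclass MonitorType and be reached by the `this.type in UptimeKumaServer.monitorTypeList` branch added to monitor.js above; the class, type name, and registration line are illustrative assumptions, not part of this patch.

const { MonitorType } = require("./monitor-type"); // path assumed
const { UP } = require("../../src/util");          // status constant, as imported elsewhere in this patch

class AlwaysUpMonitorType extends MonitorType {
    name = "always-up"; // hypothetical type name

    async check(monitor, heartbeat) {
        heartbeat.msg = "OK";
        heartbeat.status = UP;
        // heartbeat.ping is left unset so the caller in monitor.js fills in the elapsed time
    }
}

// Hypothetical registration – monitor.js only requires the instance to be present
// in UptimeKumaServer.monitorTypeList under its type name:
// UptimeKumaServer.monitorTypeList["always-up"] = new AlwaysUpMonitorType();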
@ -28,17 +28,17 @@ class Bark extends NotificationProvider {
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === UP) { if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === UP) {
let title = "UptimeKuma Monitor Up"; let title = "UptimeKuma Monitor Up";
return await this.postNotification(title, msg, barkEndpoint); return await this.postNotification(notification, title, msg, barkEndpoint);
} }
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === DOWN) { if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === DOWN) {
let title = "UptimeKuma Monitor Down"; let title = "UptimeKuma Monitor Down";
return await this.postNotification(title, msg, barkEndpoint); return await this.postNotification(notification, title, msg, barkEndpoint);
} }
if (msg != null) { if (msg != null) {
let title = "UptimeKuma Message"; let title = "UptimeKuma Message";
return await this.postNotification(title, msg, barkEndpoint); return await this.postNotification(notification, title, msg, barkEndpoint);
} }
} }
@ -50,7 +50,7 @@ class Bark extends NotificationProvider {
*/ */
appendAdditionalParameters(notification, postUrl) { appendAdditionalParameters(notification, postUrl) {
// set icon to uptime kuma icon, 11kb should be fine // set icon to uptime kuma icon, 11kb should be fine
postUrl += "&icon=" + barkNotificationAvatar; postUrl += "?icon=" + barkNotificationAvatar;
// grouping all our notifications // grouping all our notifications
if (notification.barkGroup != null) { if (notification.barkGroup != null) {
postUrl += "&group=" + notification.barkGroup; postUrl += "&group=" + notification.barkGroup;
@ -89,12 +89,12 @@ class Bark extends NotificationProvider {
* @param {string} endpoint Endpoint to send request to * @param {string} endpoint Endpoint to send request to
* @returns {string} * @returns {string}
*/ */
async postNotification(title, subtitle, endpoint) { async postNotification(notification, title, subtitle, endpoint) {
// url encode title and subtitle // url encode title and subtitle
title = encodeURIComponent(title); title = encodeURIComponent(title);
subtitle = encodeURIComponent(subtitle); subtitle = encodeURIComponent(subtitle);
let postUrl = endpoint + "/" + title + "/" + subtitle; let postUrl = endpoint + "/" + title + "/" + subtitle;
postUrl = this.appendAdditionalParameters(postUrl); postUrl = this.appendAdditionalParameters(notification, postUrl);
let result = await axios.get(postUrl); let result = await axios.get(postUrl);
this.checkResult(result); this.checkResult(result);
if (result.statusText != null) { if (result.statusText != null) {

@ -8,7 +8,6 @@ class ClickSendSMS extends NotificationProvider {
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) { async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully."; let okMsg = "Sent Successfully.";
try { try {
console.log({ notification });
let config = { let config = {
headers: { headers: {
"Content-Type": "application/json", "Content-Type": "application/json",

@ -64,7 +64,7 @@ class Discord extends NotificationProvider {
}, },
{ {
name: "Error", name: "Error",
value: heartbeatJSON["msg"], value: heartbeatJSON["msg"] == null ? "N/A" : heartbeatJSON["msg"],
}, },
], ],
}], }],
@ -91,7 +91,7 @@ class Discord extends NotificationProvider {
}, },
{ {
name: monitorJSON["type"] === "push" ? "Service Type" : "Service URL", name: monitorJSON["type"] === "push" ? "Service Type" : "Service URL",
value: monitorJSON["type"] === "push" ? "Heartbeat" : address.startsWith("http") ? "[Visit Service](" + address + ")" : address, value: monitorJSON["type"] === "push" ? "Heartbeat" : address,
}, },
{ {
name: "Time (UTC)", name: "Time (UTC)",

@ -0,0 +1,24 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class FreeMobile extends NotificationProvider {
name = "FreeMobile";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
await axios.post(`https://smsapi.free-mobile.fr/sendmsg?msg=${encodeURIComponent(msg.replace("🔴", "⛔️"))}`, {
"user": notification.freemobileUser,
"pass": notification.freemobilePass,
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = FreeMobile;

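A quick, hypothetical usage sketch for the new FreeMobile provider (not part of the commit); the credentials are placeholders and the require path assumes the provider sits next to the other notification providers.

const FreeMobile = require("./notification-providers/freemobile"); // assumed path

const notification = {
    freemobileUser: "12345678",        // placeholder Free Mobile user id
    freemobilePass: "your-api-key",    // placeholder API key
};

new FreeMobile()
    .send(notification, "🔴 Example monitor is down")
    .then(console.log)      // "Sent Successfully."
    .catch(console.error);
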
@ -22,7 +22,7 @@ class GoAlert extends NotificationProvider {
let config = { let config = {
headers: headers headers: headers
}; };
let resp = await axios.post(`${notification.goAlertBaseURL}/api/v2/generic/incoming?token=${notification.goAlertToken}`, data, config); await axios.post(`${notification.goAlertBaseURL}/api/v2/generic/incoming?token=${notification.goAlertToken}`, data, config);
return okMsg; return okMsg;
} catch (error) { } catch (error) {

@ -0,0 +1,31 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class Kook extends NotificationProvider {
name = "Kook";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let url = "https://www.kookapp.cn/api/v3/message/create";
let data = {
target_id: notification.kookGuildID,
content: msg,
};
let config = {
headers: {
"Authorization": "Bot " + notification.kookBotToken,
"Content-Type": "application/json",
},
};
try {
await axios.post(url, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Kook;

@ -8,12 +8,24 @@ class Ntfy extends NotificationProvider {
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) { async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully."; let okMsg = "Sent Successfully.";
try { try {
await axios.post(`${notification.ntfyserverurl}`, { let headers = {};
if (notification.ntfyusername) {
headers = {
"Authorization": "Basic " + Buffer.from(notification.ntfyusername + ":" + notification.ntfypassword).toString("base64"),
};
}
let data = {
"topic": notification.ntfytopic, "topic": notification.ntfytopic,
"message": msg, "message": msg,
"priority": notification.ntfyPriority || 4, "priority": notification.ntfyPriority || 4,
"title": "Uptime-Kuma", "title": "Uptime-Kuma",
}); };
if (notification.ntfyIcon) {
data.icon = notification.ntfyIcon;
}
await axios.post(`${notification.ntfyserverurl}`, data, { headers: headers });
return okMsg; return okMsg;

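Standalone sketch (not from the commit) of how the optional Basic auth header added above is built when ntfyusername is set; the username and password are placeholders.

const user = "alice";      // stands in for notification.ntfyusername
const pass = "secret";     // stands in for notification.ntfypassword

const headers = {
    "Authorization": "Basic " + Buffer.from(user + ":" + pass).toString("base64"),
};

console.log(headers.Authorization); // "Basic YWxpY2U6c2VjcmV0"
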
@ -10,7 +10,7 @@ class Octopush extends NotificationProvider {
try { try {
// Default - V2 // Default - V2
if (notification.octopushVersion === 2 || !notification.octopushVersion) { if (notification.octopushVersion === "2" || !notification.octopushVersion) {
let config = { let config = {
headers: { headers: {
"api-key": notification.octopushAPIKey, "api-key": notification.octopushAPIKey,
@ -31,7 +31,7 @@ class Octopush extends NotificationProvider {
"sender": notification.octopushSenderName "sender": notification.octopushSenderName
}; };
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config); await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config);
} else if (notification.octopushVersion === 1) { } else if (notification.octopushVersion === "1") {
let data = { let data = {
"user_login": notification.octopushDMLogin, "user_login": notification.octopushDMLogin,
"api_key": notification.octopushDMAPIKey, "api_key": notification.octopushDMAPIKey,
@ -49,7 +49,15 @@ class Octopush extends NotificationProvider {
}, },
params: data params: data
}; };
await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config);
// V1 API returns 200 even on error so we must check
// response data
let response = await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config);
if ("error_code" in response.data) {
if (response.data.error_code !== "000") {
this.throwGeneralAxiosError(`Octopush error ${JSON.stringify(response.data)}`);
}
}
} else { } else {
throw new Error("Unknown Octopush version!"); throw new Error("Unknown Octopush version!");
} }

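The version check now compares against the strings "1"/"2" (the stored setting is presumably a string), and the V1 branch inspects the response body because that endpoint returns HTTP 200 even on failure. A standalone sketch of that body check, with a made-up response object:

const response = { data: { error_code: "100", error: "Wrong credentials" } }; // hypothetical V1 reply

if ("error_code" in response.data && response.data.error_code !== "000") {
    // In the provider this goes through this.throwGeneralAxiosError(...)
    throw new Error(`Octopush error ${JSON.stringify(response.data)}`);
}
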
@ -8,6 +8,14 @@ class PromoSMS extends NotificationProvider {
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) { async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully."; let okMsg = "Sent Successfully.";
if (notification.promosmsAllowLongSMS === undefined) {
notification.promosmsAllowLongSMS = false;
}
//TODO: Add option for enabling special characters. It will decrease message max length from 160 to 70 chars.
//Let's remove non-ASCII chars
let cleanMsg = msg.replace(/[^\x00-\x7F]/g, "");
try { try {
let config = { let config = {
headers: { headers: {
@ -18,8 +26,9 @@ class PromoSMS extends NotificationProvider {
}; };
let data = { let data = {
"recipients": [ notification.promosmsPhoneNumber ], "recipients": [ notification.promosmsPhoneNumber ],
//Lets remove non ascii char //Trim message to maximum length of 1 SMS or 4 if we allowed long messages
"text": msg.replace(/[^\x00-\x7F]/g, ""), "text": notification.promosmsAllowLongSMS ? cleanMsg.substring(0, 639) : cleanMsg.substring(0, 159),
"long-sms": notification.promosmsAllowLongSMS,
"type": Number(notification.promosmsSMSType), "type": Number(notification.promosmsSMSType),
"sender": notification.promosmsSenderName "sender": notification.promosmsSenderName
}; };

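Standalone sketch (not from the commit) of the trimming behaviour added above: non-ASCII characters are stripped first, then the text is cut to one SMS (159 chars here) or roughly four concatenated SMS (639 chars) when long messages are allowed.

function buildPromoSmsText(msg, allowLongSMS = false) {
    const cleanMsg = msg.replace(/[^\x00-\x7F]/g, "");   // strip non-ASCII characters
    return allowLongSMS
        ? cleanMsg.substring(0, 639)                     // up to ~4 concatenated SMS
        : cleanMsg.substring(0, 159);                    // single SMS
}

console.log(buildPromoSmsText("🔴 Example monitor is down (response time > 5000 ms)"));
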
@ -19,26 +19,26 @@ class Pushbullet extends NotificationProvider {
} }
}; };
if (heartbeatJSON == null) { if (heartbeatJSON == null) {
let testdata = { let data = {
"type": "note", "type": "note",
"title": "Uptime Kuma Alert", "title": "Uptime Kuma Alert",
"body": "Testing Successful.", "body": msg,
}; };
await axios.post(pushbulletUrl, testdata, config); await axios.post(pushbulletUrl, data, config);
} else if (heartbeatJSON["status"] === DOWN) { } else if (heartbeatJSON["status"] === DOWN) {
let downdata = { let downData = {
"type": "note", "type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"], "title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"], "body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}; };
await axios.post(pushbulletUrl, downdata, config); await axios.post(pushbulletUrl, downData, config);
} else if (heartbeatJSON["status"] === UP) { } else if (heartbeatJSON["status"] === UP) {
let updata = { let upData = {
"type": "note", "type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"], "title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"], "body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}; };
await axios.post(pushbulletUrl, updata, config); await axios.post(pushbulletUrl, upData, config);
} }
return okMsg; return okMsg;
} catch (error) { } catch (error) {

@ -10,7 +10,7 @@ class Pushover extends NotificationProvider {
let pushoverlink = "https://api.pushover.net/1/messages.json"; let pushoverlink = "https://api.pushover.net/1/messages.json";
let data = { let data = {
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg, "message": msg,
"user": notification.pushoveruserkey, "user": notification.pushoveruserkey,
"token": notification.pushoverapptoken, "token": notification.pushoverapptoken,
"sound": notification.pushoversounds, "sound": notification.pushoversounds,

@ -0,0 +1,42 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class ServerChan extends NotificationProvider {
name = "ServerChan";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
await axios.post(`https://sctapi.ftqq.com/${notification.serverChanSendKey}.send`, {
"title": this.checkStatus(heartbeatJSON, monitorJSON),
"desp": msg,
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
/**
* Get the formatted title for message
* @param {?Object} heartbeatJSON Heartbeat details (For Up/Down only)
* @param {?Object} monitorJSON Monitor details (For Up/Down only)
* @returns {string} Formatted title
*/
checkStatus(heartbeatJSON, monitorJSON) {
let title = "UptimeKuma Message";
if (heartbeatJSON != null && heartbeatJSON["status"] === UP) {
title = "UptimeKuma Monitor Up " + monitorJSON["name"];
}
if (heartbeatJSON != null && heartbeatJSON["status"] === DOWN) {
title = "UptimeKuma Monitor Down " + monitorJSON["name"];
}
return title;
}
}
module.exports = ServerChan;

@ -1,7 +1,7 @@
const NotificationProvider = require("./notification-provider"); const NotificationProvider = require("./notification-provider");
const axios = require("axios"); const axios = require("axios");
const { setSettings, setting } = require("../util-server"); const { setSettings, setting } = require("../util-server");
const { getMonitorRelativeURL } = require("../../src/util"); const { getMonitorRelativeURL, UP } = require("../../src/util");
class Slack extends NotificationProvider { class Slack extends NotificationProvider {
@ -46,24 +46,31 @@ class Slack extends NotificationProvider {
"channel": notification.slackchannel, "channel": notification.slackchannel,
"username": notification.slackusername, "username": notification.slackusername,
"icon_emoji": notification.slackiconemo, "icon_emoji": notification.slackiconemo,
"blocks": [{ "attachments": [
"type": "header",
"text": {
"type": "plain_text",
"text": "Uptime Kuma Alert",
},
},
{
"type": "section",
"fields": [{
"type": "mrkdwn",
"text": "*Message*\n" + msg,
},
{ {
"type": "mrkdwn", "color": (heartbeatJSON["status"] === UP) ? "#2eb886" : "#e01e5a",
"text": "*Time (UTC)*\n" + time, "blocks": [
}], {
}], "type": "header",
"text": {
"type": "plain_text",
"text": "Uptime Kuma Alert",
},
},
{
"type": "section",
"fields": [{
"type": "mrkdwn",
"text": "*Message*\n" + msg,
},
{
"type": "mrkdwn",
"text": "*Time (UTC)*\n" + time,
}],
}
],
}
]
}; };
if (notification.slackbutton) { if (notification.slackbutton) {
@ -74,17 +81,19 @@ class Slack extends NotificationProvider {
// Button // Button
if (baseURL) { if (baseURL) {
data.blocks.push({ data.attachments.forEach(element => {
"type": "actions", element.blocks.push({
"elements": [{ "type": "actions",
"type": "button", "elements": [{
"text": { "type": "button",
"type": "plain_text", "text": {
"text": "Visit Uptime Kuma", "type": "plain_text",
}, "text": "Visit Uptime Kuma",
"value": "Uptime-Kuma", },
"url": baseURL + getMonitorRelativeURL(monitorJSON.id), "value": "Uptime-Kuma",
}], "url": baseURL + getMonitorRelativeURL(monitorJSON.id),
}],
});
}); });
} }

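For orientation (not part of the commit), the Slack payload after this change takes roughly the shape below: the existing blocks move inside an attachments array so the colour bar can reflect UP/DOWN, and the "Visit Uptime Kuma" button is pushed into each attachment's blocks when a primary base URL is configured. The channel, emoji, message text, and timestamp values are placeholders.

const examplePayload = {
    "channel": "#monitoring",            // placeholder
    "username": "Uptime Kuma",
    "icon_emoji": ":robot_face:",        // placeholder
    "attachments": [
        {
            "color": "#2eb886",          // UP; "#e01e5a" for DOWN
            "blocks": [
                { "type": "header", "text": { "type": "plain_text", "text": "Uptime Kuma Alert" } },
                {
                    "type": "section",
                    "fields": [
                        { "type": "mrkdwn", "text": "*Message*\nService is back online" },
                        { "type": "mrkdwn", "text": "*Time (UTC)*\n2022-01-01 00:00:00" },
                    ],
                },
                // the actions block with the "Visit Uptime Kuma" button is appended here per attachment
            ],
        },
    ],
};

console.log(JSON.stringify(examplePayload, null, 2));
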
@ -0,0 +1,71 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class SMSEagle extends NotificationProvider {
name = "SMSEagle";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let config = {
headers: {
"Content-Type": "application/json",
}
};
let postData;
let sendMethod;
let recipientType;
let encoding = (notification.smseagleEncoding) ? "1" : "0";
let priority = (notification.smseaglePriority) ? notification.smseaglePriority : "0";
if (notification.smseagleRecipientType === "smseagle-contact") {
recipientType = "contactname";
sendMethod = "sms.send_tocontact";
}
if (notification.smseagleRecipientType === "smseagle-group") {
recipientType = "groupname";
sendMethod = "sms.send_togroup";
}
if (notification.smseagleRecipientType === "smseagle-to") {
recipientType = "to";
sendMethod = "sms.send_sms";
}
let params = {
access_token: notification.smseagleToken,
[recipientType]: notification.smseagleRecipient,
message: msg,
responsetype: "extended",
unicode: encoding,
highpriority: priority
};
postData = {
method: sendMethod,
params: params
};
let resp = await axios.post(notification.smseagleUrl + "/jsonrpc/sms", postData, config);
if ((JSON.stringify(resp.data)).indexOf("message_id") === -1) {
let error = "";
if (resp.data.result && resp.data.result.error_text) {
error = `SMSEagle API returned error: ${JSON.stringify(resp.data.result.error_text)}`;
} else {
error = "SMSEagle API returned an unexpected response";
}
throw new Error(error);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = SMSEagle;

@ -0,0 +1,25 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class SMSManager extends NotificationProvider {
name = "SMSManager";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
try {
let data = {
apikey: notification.smsmanagerApiKey,
endpoint: "https://http-api.smsmanager.cz/Send",
message: msg.replace(/[^\x00-\x7F]/g, ""),
to: notification.numbers,
messageType: notification.messageType,
};
await axios.get(`${data.endpoint}?apikey=${data.apikey}&message=${data.message}&number=${data.to}&gateway=${data.messageType}`);
return "SMS sent successfully.";
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = SMSManager;

@ -0,0 +1,113 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { UP, DOWN, getMonitorRelativeURL } = require("../../src/util");
const { setting } = require("../util-server");
let successMessage = "Sent Successfully.";
class Splunk extends NotificationProvider {
name = "Splunk";
/**
* @inheritdoc
*/
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
try {
if (heartbeatJSON == null) {
const title = "Uptime Kuma Alert";
const monitor = {
type: "ping",
url: "Uptime Kuma Test Button",
};
return this.postNotification(notification, title, msg, monitor, "trigger");
}
if (heartbeatJSON.status === UP) {
const title = "Uptime Kuma Monitor ✅ Up";
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, "recovery");
}
if (heartbeatJSON.status === DOWN) {
const title = "Uptime Kuma Monitor 🔴 Down";
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, "trigger");
}
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
/**
* Check if result is successful, result code should be in range 2xx
* @param {Object} result Axios response object
* @throws {Error} The status code is not in range 2xx
*/
checkResult(result) {
if (result.status == null) {
throw new Error("Splunk notification failed with invalid response!");
}
if (result.status < 200 || result.status >= 300) {
throw new Error("Splunk notification failed with status code " + result.status);
}
}
/**
* Send the message
* @param {BeanModel} notification Notification details
* @param {string} title Message title
* @param {string} body Message
* @param {Object} monitorInfo Monitor details (For Up/Down only)
* @param {?string} eventAction Action event (trigger, recovery)
* @returns {string}
*/
async postNotification(notification, title, body, monitorInfo, eventAction = "trigger") {
let monitorUrl;
if (monitorInfo.type === "port") {
monitorUrl = monitorInfo.hostname;
if (monitorInfo.port) {
monitorUrl += ":" + monitorInfo.port;
}
} else if (monitorInfo.hostname != null) {
monitorUrl = monitorInfo.hostname;
} else {
monitorUrl = monitorInfo.url;
}
if (eventAction === "recovery") {
if (notification.splunkAutoResolve === "0") {
return "No action required";
}
eventAction = notification.splunkAutoResolve;
} else {
eventAction = notification.splunkSeverity;
}
const options = {
method: "POST",
url: notification.splunkRestURL,
headers: { "Content-Type": "application/json" },
data: {
message_type: eventAction,
state_message: `[${title}] [${monitorUrl}] ${body}`,
entity_display_name: "Uptime Kuma Alert: " + monitorInfo.name,
routing_key: notification.pagerdutyIntegrationKey,
entity_id: "Uptime Kuma/" + monitorInfo.id,
}
};
const baseURL = await setting("primaryBaseURL");
if (baseURL && monitorInfo) {
options.client = "Uptime Kuma";
options.client_url = baseURL + getMonitorRelativeURL(monitorInfo.id);
}
let result = await axios.request(options);
this.checkResult(result);
if (result.statusText != null) {
return "Splunk notification succeed: " + result.statusText;
}
return successMessage;
}
}
module.exports = Splunk;

@ -0,0 +1,76 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN } = require("../../src/util");
class Squadcast extends NotificationProvider {
name = "squadcast";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let config = {};
let data = {
message: msg,
description: "",
tags: {},
heartbeat: heartbeatJSON,
source: "uptime-kuma"
};
if (heartbeatJSON !== null) {
data.description = heartbeatJSON["msg"];
data.event_id = heartbeatJSON["monitorID"];
if (heartbeatJSON["status"] === DOWN) {
data.message = `${monitorJSON["name"]} is DOWN`;
data.status = "trigger";
} else {
data.message = `${monitorJSON["name"]} is UP`;
data.status = "resolve";
}
let address;
switch (monitorJSON["type"]) {
case "ping":
address = monitorJSON["hostname"];
break;
case "port":
case "dns":
case "steam":
address = monitorJSON["hostname"];
if (monitorJSON["port"]) {
address += ":" + monitorJSON["port"];
}
break;
default:
address = monitorJSON["url"];
break;
}
data.tags["AlertAddress"] = address;
monitorJSON["tags"].forEach(tag => {
data.tags[tag["name"]] = {
value: tag["value"]
};
if (tag["color"] !== null) {
data.tags[tag["name"]]["color"] = tag["color"];
}
});
}
await axios.post(notification.squadcastWebhookURL, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Squadcast;

@ -63,7 +63,7 @@ class Teams extends NotificationProvider {
}); });
} }
if (monitorUrl) { if (monitorUrl && monitorUrl !== "https://") {
facts.push({ facts.push({
name: "URL", name: "URL",
value: monitorUrl, value: monitorUrl,
@ -127,13 +127,17 @@ class Teams extends NotificationProvider {
let url; let url;
if (monitorJSON["type"] === "port") { switch (monitorJSON["type"]) {
url = monitorJSON["hostname"]; case "http":
if (monitorJSON["port"]) { case "keyword":
url += ":" + monitorJSON["port"]; url = monitorJSON["url"];
} break;
} else { case "docker":
url = monitorJSON["url"]; url = monitorJSON["docker_host"];
break;
default:
url = monitorJSON["hostname"];
break;
} }
const payload = this._notificationPayloadFactory({ const payload = this._notificationPayloadFactory({

@ -16,20 +16,29 @@ class Webhook extends NotificationProvider {
msg, msg,
}; };
let finalData; let finalData;
let config = {}; let config = {
headers: {}
};
if (notification.webhookContentType === "form-data") { if (notification.webhookContentType === "form-data") {
finalData = new FormData(); finalData = new FormData();
finalData.append("data", JSON.stringify(data)); finalData.append("data", JSON.stringify(data));
config.headers = finalData.getHeaders();
config = {
headers: finalData.getHeaders(),
};
} else { } else {
finalData = data; finalData = data;
} }
if (notification.webhookAdditionalHeaders) {
try {
config.headers = {
...config.headers,
...JSON.parse(notification.webhookAdditionalHeaders)
};
} catch (err) {
throw "Additional Headers is not a valid JSON";
}
}
await axios.post(notification.webhookURL, finalData, config); await axios.post(notification.webhookURL, finalData, config);
return okMsg; return okMsg;

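Standalone sketch (not from the commit) of the additional-headers handling added above: the user-supplied JSON string is parsed and spread over any headers already set (for example the form-data boundary headers), and an invalid string is rejected. The token value is a placeholder.

const config = { headers: {} };
const webhookAdditionalHeaders = "{ \"Authorization\": \"Bearer placeholder-token\" }"; // user input

try {
    config.headers = {
        ...config.headers,
        ...JSON.parse(webhookAdditionalHeaders),
    };
} catch (err) {
    throw "Additional Headers is not a valid JSON";
}

console.log(config.headers); // { Authorization: 'Bearer placeholder-token' }
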
@ -0,0 +1,116 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class ZohoCliq extends NotificationProvider {
name = "ZohoCliq";
/**
* Generate the message to send
* @param {const} status The status constant
* @param {string} monitorName Name of monitor
* @returns {string}
*/
_statusMessageFactory = (status, monitorName) => {
if (status === DOWN) {
return `🔴 Application [${monitorName}] went down\n`;
} else if (status === UP) {
return `✅ Application [${monitorName}] is back online\n`;
}
return "Notification\n";
};
/**
* Send the notification
* @param {string} webhookUrl URL to send the request to
* @param {Array} payload Payload generated by _notificationPayloadFactory
*/
_sendNotification = async (webhookUrl, payload) => {
await axios.post(webhookUrl, { text: payload.join("\n") });
};
/**
* Generate payload for notification
* @param {const} status The status of the monitor
* @param {string} monitorMessage Message to send
* @param {string} monitorName Name of monitor affected
* @param {string} monitorUrl URL of monitor affected
* @returns {Array}
*/
_notificationPayloadFactory = ({
status,
monitorMessage,
monitorName,
monitorUrl,
}) => {
const payload = [];
payload.push("### Uptime Kuma\n");
payload.push(this._statusMessageFactory(status, monitorName));
payload.push(`*Description:* ${monitorMessage}`);
if (monitorName) {
payload.push(`*Monitor:* ${monitorName}`);
}
if (monitorUrl && monitorUrl !== "https://") {
payload.push(`*URL:* [${monitorUrl}](${monitorUrl})`);
}
return payload;
};
/**
* Send a general notification
* @param {string} webhookUrl URL to send request to
* @param {string} msg Message to send
* @returns {Promise<void>}
*/
_handleGeneralNotification = (webhookUrl, msg) => {
const payload = this._notificationPayloadFactory({
monitorMessage: msg
});
return this._sendNotification(webhookUrl, payload);
};
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
if (heartbeatJSON == null) {
await this._handleGeneralNotification(notification.webhookUrl, msg);
return okMsg;
}
let url;
switch (monitorJSON["type"]) {
case "http":
case "keyword":
url = monitorJSON["url"];
break;
case "docker":
url = monitorJSON["docker_host"];
break;
default:
url = monitorJSON["hostname"];
break;
}
const payload = this._notificationPayloadFactory({
monitorMessage: heartbeatJSON.msg,
monitorName: monitorJSON.name,
monitorUrl: url,
status: heartbeatJSON.status
});
await this._sendNotification(notification.webhookUrl, payload);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = ZohoCliq;

@ -9,10 +9,12 @@ const ClickSendSMS = require("./notification-providers/clicksendsms");
const DingDing = require("./notification-providers/dingding"); const DingDing = require("./notification-providers/dingding");
const Discord = require("./notification-providers/discord"); const Discord = require("./notification-providers/discord");
const Feishu = require("./notification-providers/feishu"); const Feishu = require("./notification-providers/feishu");
const FreeMobile = require("./notification-providers/freemobile");
const GoogleChat = require("./notification-providers/google-chat"); const GoogleChat = require("./notification-providers/google-chat");
const Gorush = require("./notification-providers/gorush"); const Gorush = require("./notification-providers/gorush");
const Gotify = require("./notification-providers/gotify"); const Gotify = require("./notification-providers/gotify");
const HomeAssistant = require("./notification-providers/home-assistant"); const HomeAssistant = require("./notification-providers/home-assistant");
const Kook = require("./notification-providers/kook");
const Line = require("./notification-providers/line"); const Line = require("./notification-providers/line");
const LineNotify = require("./notification-providers/linenotify"); const LineNotify = require("./notification-providers/linenotify");
const LunaSea = require("./notification-providers/lunasea"); const LunaSea = require("./notification-providers/lunasea");
@ -31,14 +33,20 @@ const RocketChat = require("./notification-providers/rocket-chat");
const SerwerSMS = require("./notification-providers/serwersms"); const SerwerSMS = require("./notification-providers/serwersms");
const Signal = require("./notification-providers/signal"); const Signal = require("./notification-providers/signal");
const Slack = require("./notification-providers/slack"); const Slack = require("./notification-providers/slack");
const SMSEagle = require("./notification-providers/smseagle");
const SMTP = require("./notification-providers/smtp"); const SMTP = require("./notification-providers/smtp");
const Squadcast = require("./notification-providers/squadcast");
const Stackfield = require("./notification-providers/stackfield"); const Stackfield = require("./notification-providers/stackfield");
const Teams = require("./notification-providers/teams"); const Teams = require("./notification-providers/teams");
const TechulusPush = require("./notification-providers/techulus-push"); const TechulusPush = require("./notification-providers/techulus-push");
const Telegram = require("./notification-providers/telegram"); const Telegram = require("./notification-providers/telegram");
const Splunk = require("./notification-providers/splunk");
const Webhook = require("./notification-providers/webhook"); const Webhook = require("./notification-providers/webhook");
const WeCom = require("./notification-providers/wecom"); const WeCom = require("./notification-providers/wecom");
const GoAlert = require("./notification-providers/goalert"); const GoAlert = require("./notification-providers/goalert");
const SMSManager = require("./notification-providers/smsmanager");
const ServerChan = require("./notification-providers/serverchan");
const ZohoCliq = require("./notification-providers/zoho-cliq");
class Notification { class Notification {
@ -60,10 +68,12 @@ class Notification {
new DingDing(), new DingDing(),
new Discord(), new Discord(),
new Feishu(), new Feishu(),
new FreeMobile(),
new GoogleChat(), new GoogleChat(),
new Gorush(), new Gorush(),
new Gotify(), new Gotify(),
new HomeAssistant(), new HomeAssistant(),
new Kook(),
new Line(), new Line(),
new LineNotify(), new LineNotify(),
new LunaSea(), new LunaSea(),
@ -79,17 +89,23 @@ class Notification {
new Pushover(), new Pushover(),
new Pushy(), new Pushy(),
new RocketChat(), new RocketChat(),
new ServerChan(),
new SerwerSMS(), new SerwerSMS(),
new Signal(), new Signal(),
new SMSManager(),
new Slack(), new Slack(),
new SMSEagle(),
new SMTP(), new SMTP(),
new Squadcast(),
new Stackfield(), new Stackfield(),
new Teams(), new Teams(),
new TechulusPush(), new TechulusPush(),
new Telegram(), new Telegram(),
new Splunk(),
new Webhook(), new Webhook(),
new WeCom(), new WeCom(),
new GoAlert(), new GoAlert(),
new ZohoCliq()
]; ];
for (let item of list) { for (let item of list) {

@ -1,199 +0,0 @@
// https://github.com/ben-bradley/ping-lite/blob/master/ping-lite.js
// Fixed on Windows
const net = require("net");
const spawn = require("child_process").spawn;
const events = require("events");
const fs = require("fs");
const util = require("./util-server");
module.exports = Ping;
/**
* Constructor for ping class
* @param {string} host Host to ping
* @param {object} [options] Options for the ping command
* @param {array|string} [options.args] - Arguments to pass to the ping command
*/
function Ping(host, options) {
if (!host) {
throw new Error("You must specify a host to ping!");
}
this._host = host;
this._options = options = (options || {});
events.EventEmitter.call(this);
const timeout = 10;
if (util.WIN) {
this._bin = "c:/windows/system32/ping.exe";
this._args = (options.args) ? options.args : [ "-n", "1", "-w", timeout * 1000, host ];
this._regmatch = /[><=]([0-9.]+?)ms/;
} else if (util.LIN) {
this._bin = "/bin/ping";
const defaultArgs = [ "-n", "-w", timeout, "-c", "1", host ];
if (net.isIPv6(host) || options.ipv6) {
defaultArgs.unshift("-6");
}
this._args = (options.args) ? options.args : defaultArgs;
this._regmatch = /=([0-9.]+?) ms/;
} else if (util.MAC) {
if (net.isIPv6(host) || options.ipv6) {
this._bin = "/sbin/ping6";
} else {
this._bin = "/sbin/ping";
}
this._args = (options.args) ? options.args : [ "-n", "-t", timeout, "-c", "1", host ];
this._regmatch = /=([0-9.]+?) ms/;
} else if (util.BSD) {
this._bin = "/sbin/ping";
const defaultArgs = [ "-n", "-t", timeout, "-c", "1", host ];
if (net.isIPv6(host) || options.ipv6) {
defaultArgs.unshift("-6");
}
this._args = (options.args) ? options.args : defaultArgs;
this._regmatch = /=([0-9.]+?) ms/;
} else {
throw new Error("Could not detect your ping binary.");
}
if (!fs.existsSync(this._bin)) {
throw new Error("Could not detect " + this._bin + " on your system");
}
this._i = 0;
return this;
}
Ping.prototype.__proto__ = events.EventEmitter.prototype;
/**
* Callback for send
* @callback pingCB
* @param {any} err Any error encountered
* @param {number} ms Ping time in ms
*/
/**
* Send a ping
* @param {pingCB} callback Callback to call with results
*/
Ping.prototype.send = function (callback) {
let self = this;
callback = callback || function (err, ms) {
if (err) {
return self.emit("error", err);
}
return self.emit("result", ms);
};
let _ended;
let _exited;
let _errored;
this._ping = spawn(this._bin, this._args); // spawn the binary
this._ping.on("error", function (err) { // handle binary errors
_errored = true;
callback(err);
});
this._ping.stdout.on("data", function (data) { // log stdout
if (util.WIN) {
data = convertOutput(data);
}
this._stdout = (this._stdout || "") + data;
});
this._ping.stdout.on("end", function () {
_ended = true;
if (_exited && !_errored) {
onEnd.call(self._ping);
}
});
this._ping.stderr.on("data", function (data) { // log stderr
if (util.WIN) {
data = convertOutput(data);
}
this._stderr = (this._stderr || "") + data;
});
this._ping.on("exit", function (code) { // handle complete
_exited = true;
if (_ended && !_errored) {
onEnd.call(self._ping);
}
});
/**
* @param {Function} callback
*
* Generated by Trelent
*/
function onEnd() {
let stdout = this.stdout._stdout;
let stderr = this.stderr._stderr;
let ms;
if (stderr) {
return callback(new Error(stderr));
}
if (!stdout) {
return callback(new Error("No stdout detected"));
}
ms = stdout.match(self._regmatch); // parse out the ##ms response
ms = (ms && ms[1]) ? Number(ms[1]) : ms;
callback(null, ms, stdout);
}
};
/**
* Ping every interval
* @param {pingCB} callback Callback to call with results
*/
Ping.prototype.start = function (callback) {
let self = this;
this._i = setInterval(function () {
self.send(callback);
}, (self._options.interval || 5000));
self.send(callback);
};
/** Stop sending pings */
Ping.prototype.stop = function () {
clearInterval(this._i);
};
/**
* Try to convert to UTF-8 for Windows, as the ping's output on Windows is not UTF-8 and could be in other languages
* Thank @pemassi
* https://github.com/louislam/uptime-kuma/issues/570#issuecomment-941984094
* @param {any} data
* @returns {string}
*/
function convertOutput(data) {
if (util.WIN) {
if (data) {
return util.convertToUTF8(data);
}
}
return data;
}

@ -0,0 +1,13 @@
class Plugin {
async load() {
}
async unload() {
}
}
module.exports = {
Plugin,
};

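Hypothetical example (not part of the commit) of what a plugin's data/plugins/<name>/index.js could look like, matching what PluginWrapper further down expects: the module must export a class whose constructor receives the server and which provides async load()/unload(). The class name, directory name, and log messages are assumptions.

// data/plugins/example-plugin/index.js (hypothetical)
class ExamplePlugin {
    /**
     * @param {UptimeKumaServer} server Injected by PluginWrapper via `new pluginClass(server)`
     */
    constructor(server) {
        this.server = server;
    }

    async load() {
        console.log("ExamplePlugin loaded");
    }

    async unload() {
        console.log("ExamplePlugin unloaded");
    }
}

module.exports = ExamplePlugin;
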
@ -0,0 +1,256 @@
const fs = require("fs");
const { log } = require("../src/util");
const path = require("path");
const axios = require("axios");
const { Git } = require("./git");
const childProcess = require("child_process");
class PluginsManager {
static disable = false;
/**
* Plugin List
* @type {PluginWrapper[]}
*/
pluginList = [];
/**
* Plugins Dir
*/
pluginsDir;
server;
/**
*
* @param {UptimeKumaServer} server
*/
constructor(server) {
this.server = server;
if (!PluginsManager.disable) {
this.pluginsDir = "./data/plugins/";
if (! fs.existsSync(this.pluginsDir)) {
fs.mkdirSync(this.pluginsDir, { recursive: true });
}
log.debug("plugin", "Scanning plugin directory");
let list = fs.readdirSync(this.pluginsDir);
this.pluginList = [];
for (let item of list) {
this.loadPlugin(item);
}
} else {
log.warn("PLUGIN", "Skip scanning plugin directory");
}
}
/**
* Load a plugin
*/
async loadPlugin(name) {
log.info("plugin", "Load " + name);
let plugin = new PluginWrapper(this.server, this.pluginsDir + name);
try {
await plugin.load();
this.pluginList.push(plugin);
} catch (e) {
log.error("plugin", "Failed to load plugin: " + this.pluginsDir + name);
log.error("plugin", "Reason: " + e.message);
}
}
/**
* Download a Plugin
* @param {string} repoURL Git repo url
* @param {string} name Directory name, also known as plugin unique name
*/
downloadPlugin(repoURL, name) {
if (fs.existsSync(this.pluginsDir + name)) {
log.info("plugin", "Plugin folder already exists? Removing...");
fs.rmSync(this.pluginsDir + name, {
recursive: true
});
}
log.info("plugin", "Installing plugin: " + name + " " + repoURL);
let result = Git.clone(repoURL, this.pluginsDir, name);
log.info("plugin", "Install result: " + result);
}
/**
* Remove a plugin
* @param {string} name
*/
async removePlugin(name) {
log.info("plugin", "Removing plugin: " + name);
for (let plugin of this.pluginList) {
if (plugin.info.name === name) {
await plugin.unload();
// Delete the plugin directory
fs.rmSync(this.pluginsDir + name, {
recursive: true
});
this.pluginList.splice(this.pluginList.indexOf(plugin), 1);
return;
}
}
log.warn("plugin", "Plugin not found: " + name);
throw new Error("Plugin not found: " + name);
}
/**
* TODO: Update a plugin
* Only available for plugins which were downloaded from the official list
* @param pluginID
*/
updatePlugin(pluginID) {
}
/**
* Get the plugin list from server + local installed plugin list
* Item will be merged if the `name` is the same.
* @returns {Promise<[]>}
*/
async fetchPluginList() {
let remotePluginList;
try {
const res = await axios.get("https://uptime.kuma.pet/c/plugins.json");
remotePluginList = res.data.pluginList;
} catch (e) {
log.error("plugin", "Failed to fetch plugin list: " + e.message);
remotePluginList = [];
}
for (let plugin of this.pluginList) {
let find = false;
// Try to merge
for (let remotePlugin of remotePluginList) {
if (remotePlugin.name === plugin.info.name) {
find = true;
remotePlugin.installed = true;
remotePlugin.name = plugin.info.name;
remotePlugin.fullName = plugin.info.fullName;
remotePlugin.description = plugin.info.description;
remotePlugin.version = plugin.info.version;
break;
}
}
// Local plugin
if (!find) {
plugin.info.local = true;
remotePluginList.push(plugin.info);
}
}
// Sort Installed first, then sort by name
return remotePluginList.sort((a, b) => {
if (a.installed === b.installed) {
if (a.fullName < b.fullName) {
return -1;
}
if (a.fullName > b.fullName) {
return 1;
}
return 0;
} else if (a.installed) {
return -1;
} else {
return 1;
}
});
}
}
class PluginWrapper {
server = undefined;
pluginDir = undefined;
/**
* Must be a `new-able` class.
* @type {function}
*/
pluginClass = undefined;
/**
*
* @type {Plugin}
*/
object = undefined;
info = {};
/**
*
* @param {UptimeKumaServer} server
* @param {string} pluginDir
*/
constructor(server, pluginDir) {
this.server = server;
this.pluginDir = pluginDir;
}
async load() {
let indexFile = this.pluginDir + "/index.js";
let packageJSON = this.pluginDir + "/package.json";
log.info("plugin", "Installing dependencies");
if (fs.existsSync(indexFile)) {
// Install dependencies
let result = childProcess.spawnSync("npm", [ "install" ], {
cwd: this.pluginDir,
env: {
...process.env,
PLAYWRIGHT_BROWSERS_PATH: "../../browsers", // Special handling for read-browser-monitor
}
});
if (result.stdout) {
log.info("plugin", "Install dependencies result: " + result.stdout.toString("utf-8"));
} else {
log.warn("plugin", "Install dependencies result: no output");
}
this.pluginClass = require(path.join(process.cwd(), indexFile));
let pluginClassType = typeof this.pluginClass;
if (pluginClassType === "function") {
this.object = new this.pluginClass(this.server);
await this.object.load();
} else {
throw new Error("Invalid plugin, it does not export a class");
}
if (fs.existsSync(packageJSON)) {
this.info = require(path.join(process.cwd(), packageJSON));
} else {
this.info.fullName = this.pluginDir;
this.info.name = "[unknown]";
this.info.version = "[unknown-version]";
}
this.info.installed = true;
log.info("plugin", `${this.info.fullName} v${this.info.version} loaded`);
}
}
async unload() {
await this.object.unload();
}
}
module.exports = {
PluginsManager,
PluginWrapper
};

@ -99,6 +99,7 @@ class Prometheus {
} }
} }
/** Remove monitor from prometheus */
remove() { remove() {
try { try {
monitorCertDaysRemaining.remove(this.monitorLabelValues); monitorCertDaysRemaining.remove(this.monitorLabelValues);

@ -7,7 +7,7 @@ const { UptimeKumaServer } = require("./uptime-kuma-server");
class Proxy { class Proxy {
static SUPPORTED_PROXY_PROTOCOLS = [ "http", "https", "socks", "socks5", "socks4" ]; static SUPPORTED_PROXY_PROTOCOLS = [ "http", "https", "socks", "socks5", "socks5h", "socks4" ];
/** /**
* Saves and updates given proxy entity * Saves and updates given proxy entity
@ -126,6 +126,7 @@ class Proxy {
break; break;
case "socks": case "socks":
case "socks5": case "socks5":
case "socks5h":
case "socks4": case "socks4":
agent = new SocksProxyAgent({ agent = new SocksProxyAgent({
...httpAgentOptions, ...httpAgentOptions,

@ -4,7 +4,7 @@ const { R } = require("redbean-node");
const apicache = require("../modules/apicache"); const apicache = require("../modules/apicache");
const Monitor = require("../model/monitor"); const Monitor = require("../model/monitor");
const dayjs = require("dayjs"); const dayjs = require("dayjs");
const { UP, DOWN, flipStatus, log } = require("../../src/util"); const { UP, MAINTENANCE, DOWN, flipStatus, log } = require("../../src/util");
const StatusPage = require("../model/status_page"); const StatusPage = require("../model/status_page");
const { UptimeKumaServer } = require("../uptime-kuma-server"); const { UptimeKumaServer } = require("../uptime-kuma-server");
const { makeBadge } = require("badge-maker"); const { makeBadge } = require("badge-maker");
@ -67,6 +67,11 @@ router.get("/api/push/:pushToken", async (request, response) => {
duration = dayjs(bean.time).diff(dayjs(previousHeartbeat.time), "second"); duration = dayjs(bean.time).diff(dayjs(previousHeartbeat.time), "second");
} }
if (await Monitor.isUnderMaintenance(monitor.id)) {
msg = "Monitor under maintenance";
status = MAINTENANCE;
}
log.debug("router", `/api/push/ called at ${dayjs().format("YYYY-MM-DD HH:mm:ss.SSS")}`); log.debug("router", `/api/push/ called at ${dayjs().format("YYYY-MM-DD HH:mm:ss.SSS")}`);
log.debug("router", "PreviousStatus: " + previousStatus); log.debug("router", "PreviousStatus: " + previousStatus);
log.debug("router", "Current Status: " + status); log.debug("router", "Current Status: " + status);
@ -87,7 +92,7 @@ router.get("/api/push/:pushToken", async (request, response) => {
ok: true, ok: true,
}); });
if (bean.important) { if (Monitor.isImportantForNotification(isFirstBeat, previousStatus, status)) {
await Monitor.sendNotification(isFirstBeat, monitor, bean); await Monitor.sendNotification(isFirstBeat, monitor, bean);
} }
@ -106,8 +111,12 @@ router.get("/api/badge/:id/status", cache("5 minutes"), async (request, response
label, label,
upLabel = "Up", upLabel = "Up",
downLabel = "Down", downLabel = "Down",
pendingLabel = "Pending",
maintenanceLabel = "Maintenance",
upColor = badgeConstants.defaultUpColor, upColor = badgeConstants.defaultUpColor,
downColor = badgeConstants.defaultDownColor, downColor = badgeConstants.defaultDownColor,
pendingColor = badgeConstants.defaultPendingColor,
maintenanceColor = badgeConstants.defaultMaintenanceColor,
style = badgeConstants.defaultStyle, style = badgeConstants.defaultStyle,
value, // for demo purpose only value, // for demo purpose only
} = request.query; } = request.query;
@ -134,11 +143,30 @@ router.get("/api/badge/:id/status", cache("5 minutes"), async (request, response
badgeValues.color = badgeConstants.naColor; badgeValues.color = badgeConstants.naColor;
} else { } else {
const heartbeat = await Monitor.getPreviousHeartbeat(requestedMonitorId); const heartbeat = await Monitor.getPreviousHeartbeat(requestedMonitorId);
const state = overrideValue !== undefined ? overrideValue : heartbeat.status === 1; const state = overrideValue !== undefined ? overrideValue : heartbeat.status;
badgeValues.label = label ? label : ""; badgeValues.label = label ?? "";
badgeValues.color = state ? upColor : downColor; switch (state) {
badgeValues.message = label ?? state ? upLabel : downLabel; case 0:
badgeValues.color = downColor;
badgeValues.message = downLabel;
break;
case 1:
badgeValues.color = upColor;
badgeValues.message = upLabel;
break;
case 2:
badgeValues.color = pendingColor;
badgeValues.message = pendingLabel;
break;
case 3:
badgeValues.color = maintenanceColor;
badgeValues.message = maintenanceLabel;
break;
default:
badgeValues.color = badgeConstants.naColor;
badgeValues.message = "N/A";
}
} }
// build the svg based on given values // build the svg based on given values

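Standalone sketch (not from the commit) of the status-to-badge mapping introduced above; the heartbeat status values follow the DOWN/UP/PENDING/MAINTENANCE constants (0/1/2/3), while the concrete colours below are placeholders for the query parameters and badgeConstants defaults.

function badgeForStatus(state) {
    switch (state) {
        case 0: return { color: "red",    message: "Down" };        // downColor / downLabel
        case 1: return { color: "green",  message: "Up" };          // upColor / upLabel
        case 2: return { color: "yellow", message: "Pending" };     // pendingColor / pendingLabel
        case 3: return { color: "blue",   message: "Maintenance" }; // maintenanceColor / maintenanceLabel
        default: return { color: "grey",  message: "N/A" };         // badgeConstants.naColor
    }
}

console.log(badgeForStatus(3)); // { color: 'blue', message: 'Maintenance' }
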
@ -5,6 +5,12 @@
*/ */
console.log("Welcome to Uptime Kuma"); console.log("Welcome to Uptime Kuma");
// As the log function need to use dayjs, it should be very top
const dayjs = require("dayjs");
dayjs.extend(require("dayjs/plugin/utc"));
dayjs.extend(require("./modules/dayjs/plugin/timezone"));
dayjs.extend(require("dayjs/plugin/customParseFormat"));
// Check Node.js Version // Check Node.js Version
const nodeVersion = parseInt(process.versions.node.split(".")[0]); const nodeVersion = parseInt(process.versions.node.split(".")[0]);
const requiredVersion = 14; const requiredVersion = 14;
@ -33,6 +39,7 @@ log.info("server", "Importing Node libraries");
const fs = require("fs"); const fs = require("fs");
log.info("server", "Importing 3rd-party libraries"); log.info("server", "Importing 3rd-party libraries");
log.debug("server", "Importing express"); log.debug("server", "Importing express");
const express = require("express"); const express = require("express");
const expressStaticGzip = require("express-static-gzip"); const expressStaticGzip = require("express-static-gzip");
@ -127,6 +134,11 @@ const StatusPage = require("./model/status_page");
const { cloudflaredSocketHandler, autoStart: cloudflaredAutoStart, stop: cloudflaredStop } = require("./socket-handlers/cloudflared-socket-handler"); const { cloudflaredSocketHandler, autoStart: cloudflaredAutoStart, stop: cloudflaredStop } = require("./socket-handlers/cloudflared-socket-handler");
const { proxySocketHandler } = require("./socket-handlers/proxy-socket-handler"); const { proxySocketHandler } = require("./socket-handlers/proxy-socket-handler");
const { dockerSocketHandler } = require("./socket-handlers/docker-socket-handler"); const { dockerSocketHandler } = require("./socket-handlers/docker-socket-handler");
const { maintenanceSocketHandler } = require("./socket-handlers/maintenance-socket-handler");
const { generalSocketHandler } = require("./socket-handlers/general-socket-handler");
const { Settings } = require("./settings");
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const { pluginsHandler } = require("./socket-handlers/plugins-handler");
app.use(express.json()); app.use(express.json());
@ -154,8 +166,9 @@ let needSetup = false;
(async () => { (async () => {
Database.init(args); Database.init(args);
await initDatabase(testMode); await initDatabase(testMode);
await server.initAfterDatabaseReady();
exports.entryPage = await setting("entryPage"); server.loadPlugins();
server.entryPage = await Settings.get("entryPage");
await StatusPage.loadDomainMappingList(); await StatusPage.loadDomainMappingList();
log.info("server", "Adding route"); log.info("server", "Adding route");
@ -176,14 +189,15 @@ let needSetup = false;
log.debug("entry", `Request Domain: ${hostname}`); log.debug("entry", `Request Domain: ${hostname}`);
const uptimeKumaEntryPage = server.entryPage;
if (hostname in StatusPage.domainMappingList) { if (hostname in StatusPage.domainMappingList) {
log.debug("entry", "This is a status page domain"); log.debug("entry", "This is a status page domain");
let slug = StatusPage.domainMappingList[hostname]; let slug = StatusPage.domainMappingList[hostname];
await StatusPage.handleStatusPageResponse(response, server.indexHTML, slug); await StatusPage.handleStatusPageResponse(response, server.indexHTML, slug);
} else if (exports.entryPage && exports.entryPage.startsWith("statusPage-")) { } else if (uptimeKumaEntryPage && uptimeKumaEntryPage.startsWith("statusPage-")) {
response.redirect("/status/" + exports.entryPage.replace("statusPage-", "")); response.redirect("/status/" + uptimeKumaEntryPage.replace("statusPage-", ""));
} else { } else {
response.redirect("/dashboard"); response.redirect("/dashboard");
@ -192,6 +206,7 @@ let needSetup = false;
if (isDev) { if (isDev) {
app.post("/test-webhook", async (request, response) => { app.post("/test-webhook", async (request, response) => {
log.debug("test", request.headers);
log.debug("test", request.body); log.debug("test", request.body);
response.send("OK"); response.send("OK");
}); });
@ -200,7 +215,7 @@ let needSetup = false;
// Robots.txt // Robots.txt
app.get("/robots.txt", async (_request, response) => { app.get("/robots.txt", async (_request, response) => {
let txt = "User-agent: *\nDisallow:"; let txt = "User-agent: *\nDisallow:";
if (! await setting("searchEngineIndex")) { if (!await setting("searchEngineIndex")) {
txt += " /"; txt += " /";
} }
response.setHeader("Content-Type", "text/plain"); response.setHeader("Content-Type", "text/plain");
@ -560,7 +575,6 @@ let needSetup = false;
}); });
} }
} catch (error) { } catch (error) {
console.log(error);
callback({ callback({
ok: false, ok: false,
msg: error.message, msg: error.message,
@ -620,6 +634,9 @@ let needSetup = false;
bean.import(monitor); bean.import(monitor);
bean.user_id = socket.userID; bean.user_id = socket.userID;
bean.validate();
await R.store(bean); await R.store(bean);
await updateMonitorNotification(bean.id, notificationIDList); await updateMonitorNotification(bean.id, notificationIDList);
@ -672,12 +689,14 @@ let needSetup = false;
bean.retryInterval = monitor.retryInterval; bean.retryInterval = monitor.retryInterval;
bean.resendInterval = monitor.resendInterval; bean.resendInterval = monitor.resendInterval;
bean.hostname = monitor.hostname; bean.hostname = monitor.hostname;
bean.game = monitor.game;
bean.maxretries = monitor.maxretries; bean.maxretries = monitor.maxretries;
bean.port = parseInt(monitor.port); bean.port = parseInt(monitor.port);
bean.keyword = monitor.keyword; bean.keyword = monitor.keyword;
bean.ignoreTls = monitor.ignoreTls; bean.ignoreTls = monitor.ignoreTls;
bean.expiryNotification = monitor.expiryNotification; bean.expiryNotification = monitor.expiryNotification;
bean.upsideDown = monitor.upsideDown; bean.upsideDown = monitor.upsideDown;
bean.packetSize = monitor.packetSize;
bean.maxredirects = monitor.maxredirects; bean.maxredirects = monitor.maxredirects;
bean.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes); bean.accepted_statuscodes_json = JSON.stringify(monitor.accepted_statuscodes);
bean.dns_resolve_type = monitor.dns_resolve_type; bean.dns_resolve_type = monitor.dns_resolve_type;
@ -695,12 +714,21 @@ let needSetup = false;
bean.authMethod = monitor.authMethod; bean.authMethod = monitor.authMethod;
bean.authWorkstation = monitor.authWorkstation; bean.authWorkstation = monitor.authWorkstation;
bean.authDomain = monitor.authDomain; bean.authDomain = monitor.authDomain;
bean.grpcUrl = monitor.grpcUrl;
bean.grpcProtobuf = monitor.grpcProtobuf;
bean.grpcServiceName = monitor.grpcServiceName;
bean.grpcMethod = monitor.grpcMethod;
bean.grpcBody = monitor.grpcBody;
bean.grpcMetadata = monitor.grpcMetadata;
bean.grpcEnableTls = monitor.grpcEnableTls;
bean.radiusUsername = monitor.radiusUsername; bean.radiusUsername = monitor.radiusUsername;
bean.radiusPassword = monitor.radiusPassword; bean.radiusPassword = monitor.radiusPassword;
bean.radiusCalledStationId = monitor.radiusCalledStationId; bean.radiusCalledStationId = monitor.radiusCalledStationId;
bean.radiusCallingStationId = monitor.radiusCallingStationId; bean.radiusCallingStationId = monitor.radiusCallingStationId;
bean.radiusSecret = monitor.radiusSecret; bean.radiusSecret = monitor.radiusSecret;
bean.validate();
await R.store(bean); await R.store(bean);
await updateMonitorNotification(bean.id, monitor.notificationIDList); await updateMonitorNotification(bean.id, monitor.notificationIDList);
@ -915,13 +943,21 @@ let needSetup = false;
try { try {
checkLogin(socket); checkLogin(socket);
let bean = await R.findOne("monitor", " id = ? ", [ tag.id ]); let bean = await R.findOne("tag", " id = ? ", [ tag.id ]);
if (bean == null) {
callback({
ok: false,
msg: "Tag not found",
});
return;
}
bean.name = tag.name; bean.name = tag.name;
bean.color = tag.color; bean.color = tag.color;
await R.store(bean); await R.store(bean);
callback({ callback({
ok: true, ok: true,
msg: "Saved",
tag: await bean.toJSON(), tag: await bean.toJSON(),
}); });
@ -1055,10 +1091,15 @@ let needSetup = false;
socket.on("getSettings", async (callback) => { socket.on("getSettings", async (callback) => {
try { try {
checkLogin(socket); checkLogin(socket);
const data = await getSettings("general");
if (!data.serverTimezone) {
data.serverTimezone = await server.getTimezone();
}
callback({ callback({
ok: true, ok: true,
data: await getSettings("general"), data: data,
}); });
} catch (e) { } catch (e) {
@ -1084,7 +1125,14 @@ let needSetup = false;
} }
await setSettings("general", data); await setSettings("general", data);
exports.entryPage = data.entryPage; server.entryPage = data.entryPage;
await CacheableDnsHttpAgent.update();
// Also need to apply timezone globally
if (data.serverTimezone) {
await server.setTimezone(data.serverTimezone);
}
callback({ callback({
ok: true, ok: true,
@ -1092,6 +1140,7 @@ let needSetup = false;
}); });
sendInfo(socket); sendInfo(socket);
server.sendMaintenanceList(socket);
} catch (e) { } catch (e) {
callback({ callback({
@ -1450,6 +1499,9 @@ let needSetup = false;
databaseSocketHandler(socket); databaseSocketHandler(socket);
proxySocketHandler(socket); proxySocketHandler(socket);
dockerSocketHandler(socket); dockerSocketHandler(socket);
maintenanceSocketHandler(socket);
generalSocketHandler(socket, server);
pluginsHandler(socket, server);
log.debug("server", "added all socket handlers"); log.debug("server", "added all socket handlers");
@ -1552,6 +1604,7 @@ async function afterLogin(socket, user) {
socket.join(user.id); socket.join(user.id);
let monitorList = await server.sendMonitorList(socket); let monitorList = await server.sendMonitorList(socket);
server.sendMaintenanceList(socket);
sendNotificationList(socket); sendNotificationList(socket);
sendProxyList(socket); sendProxyList(socket);
sendDockerHostList(socket); sendDockerHostList(socket);
@ -1571,6 +1624,13 @@ async function afterLogin(socket, user) {
for (let monitorID in monitorList) { for (let monitorID in monitorList) {
await Monitor.sendStats(io, monitorID, user.id); await Monitor.sendStats(io, monitorID, user.id);
} }
// Set server timezone from client browser if not set
// It should be run once only
if (! await Settings.get("initServerTimezone")) {
log.debug("server", "emit initServerTimezone");
socket.emit("initServerTimezone");
}
} }
/** /**
@ -1697,6 +1757,8 @@ async function shutdownFunction(signal) {
log.info("server", "Shutdown requested"); log.info("server", "Shutdown requested");
log.info("server", "Called signal: " + signal); log.info("server", "Called signal: " + signal);
await server.stop();
log.info("server", "Stopping all monitors"); log.info("server", "Stopping all monitors");
for (let id in server.monitorList) { for (let id in server.monitorList) {
let monitor = server.monitorList[id]; let monitor = server.monitorList[id];
@ -1707,6 +1769,7 @@ async function shutdownFunction(signal) {
stopBackgroundJobs(); stopBackgroundJobs();
await cloudflaredStop(); await cloudflaredStop();
Settings.stopCacheCleaner();
} }
/** Final function called before application exits */ /** Final function called before application exits */

@ -158,6 +158,13 @@ class Settings {
delete Settings.cacheList[key]; delete Settings.cacheList[key];
} }
} }
static stopCacheCleaner() {
if (Settings.cacheCleaner) {
clearInterval(Settings.cacheCleaner);
Settings.cacheCleaner = null;
}
}
} }
module.exports = { module.exports = {

@ -1,6 +1,7 @@
const { checkLogin, setSetting, setting, doubleCheckPassword } = require("../util-server"); const { checkLogin, setSetting, setting, doubleCheckPassword } = require("../util-server");
const { CloudflaredTunnel } = require("node-cloudflared-tunnel"); const { CloudflaredTunnel } = require("node-cloudflared-tunnel");
const { UptimeKumaServer } = require("../uptime-kuma-server"); const { UptimeKumaServer } = require("../uptime-kuma-server");
const { log } = require("../../src/util");
const io = UptimeKumaServer.getInstance().io; const io = UptimeKumaServer.getInstance().io;
const prefix = "cloudflared_"; const prefix = "cloudflared_";
@ -107,7 +108,7 @@ module.exports.autoStart = async (token) => {
/** Stop cloudflared */ /** Stop cloudflared */
module.exports.stop = async () => { module.exports.stop = async () => {
console.log("Stop cloudflared"); log.info("cloudflared", "Stop cloudflared");
if (cloudflared) { if (cloudflared) {
cloudflared.stop(); cloudflared.stop();
} }

@ -56,7 +56,7 @@ module.exports.dockerSocketHandler = (socket) => {
let amount = await DockerHost.testDockerHost(dockerHost); let amount = await DockerHost.testDockerHost(dockerHost);
let msg; let msg;
if (amount > 1) { if (amount >= 1) {
msg = "Connected Successfully. Amount of containers: " + amount; msg = "Connected Successfully. Amount of containers: " + amount;
} else { } else {
msg = "Connected Successfully, but there are no containers?"; msg = "Connected Successfully, but there are no containers?";

@ -0,0 +1,51 @@
const { log } = require("../../src/util");
const { Settings } = require("../settings");
const { sendInfo } = require("../client");
const { checkLogin } = require("../util-server");
const GameResolver = require("gamedig/lib/GameResolver");
let gameResolver = new GameResolver();
let gameList = null;
/**
* Get a game list via GameDig
* @returns {any[]}
*/
function getGameList() {
if (!gameList) {
gameList = gameResolver._readGames().games.sort((a, b) => {
if ( a.pretty < b.pretty ) {
return -1;
}
if ( a.pretty > b.pretty ) {
return 1;
}
return 0;
});
}
return gameList;
}
module.exports.generalSocketHandler = (socket, server) => {
socket.on("initServerTimezone", async (timezone) => {
try {
checkLogin(socket);
log.debug("generalSocketHandler", "Timezone: " + timezone);
await Settings.set("initServerTimezone", true);
await server.setTimezone(timezone);
await sendInfo(socket);
} catch (e) {
log.warn("initServerTimezone", e.message);
}
});
socket.on("getGameList", async (callback) => {
callback({
ok: true,
gameList: getGameList(),
});
});
};

@ -0,0 +1,317 @@
const { checkLogin } = require("../util-server");
const { log } = require("../../src/util");
const { R } = require("redbean-node");
const apicache = require("../modules/apicache");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const Maintenance = require("../model/maintenance");
const server = UptimeKumaServer.getInstance();
const MaintenanceTimeslot = require("../model/maintenance_timeslot");
/**
* Handlers for Maintenance
* @param {Socket} socket Socket.io instance
*/
module.exports.maintenanceSocketHandler = (socket) => {
// Add a new maintenance
socket.on("addMaintenance", async (maintenance, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", maintenance);
let bean = Maintenance.jsonToBean(R.dispense("maintenance"), maintenance);
bean.user_id = socket.userID;
let maintenanceID = await R.store(bean);
await MaintenanceTimeslot.generateTimeslot(bean);
await server.sendMaintenanceList(socket);
callback({
ok: true,
msg: "Added Successfully.",
maintenanceID,
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
// Edit a maintenance
socket.on("editMaintenance", async (maintenance, callback) => {
try {
checkLogin(socket);
let bean = await R.findOne("maintenance", " id = ? ", [ maintenance.id ]);
if (bean.user_id !== socket.userID) {
throw new Error("Permission denied.");
}
Maintenance.jsonToBean(bean, maintenance);
await R.store(bean);
await MaintenanceTimeslot.generateTimeslot(bean, null, true);
await server.sendMaintenanceList(socket);
callback({
ok: true,
msg: "Saved.",
maintenanceID: bean.id,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
// Add a new monitor_maintenance
socket.on("addMonitorMaintenance", async (maintenanceID, monitors, callback) => {
try {
checkLogin(socket);
await R.exec("DELETE FROM monitor_maintenance WHERE maintenance_id = ?", [
maintenanceID
]);
for await (const monitor of monitors) {
let bean = R.dispense("monitor_maintenance");
bean.import({
monitor_id: monitor.id,
maintenance_id: maintenanceID
});
await R.store(bean);
}
apicache.clear();
callback({
ok: true,
msg: "Added Successfully.",
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
// Add a new maintenance_status_page
socket.on("addMaintenanceStatusPage", async (maintenanceID, statusPages, callback) => {
try {
checkLogin(socket);
await R.exec("DELETE FROM maintenance_status_page WHERE maintenance_id = ?", [
maintenanceID
]);
for await (const statusPage of statusPages) {
let bean = R.dispense("maintenance_status_page");
bean.import({
status_page_id: statusPage.id,
maintenance_id: maintenanceID
});
await R.store(bean);
}
apicache.clear();
callback({
ok: true,
msg: "Added Successfully.",
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Get Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
let bean = await R.findOne("maintenance", " id = ? AND user_id = ? ", [
maintenanceID,
socket.userID,
]);
callback({
ok: true,
maintenance: await bean.toJSON(),
});
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMaintenanceList", async (callback) => {
try {
checkLogin(socket);
await server.sendMaintenanceList(socket);
callback({
ok: true,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMonitorMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Get Monitors for Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
let monitors = await R.getAll("SELECT monitor.id, monitor.name FROM monitor_maintenance mm JOIN monitor ON mm.monitor_id = monitor.id WHERE mm.maintenance_id = ? ", [
maintenanceID,
]);
callback({
ok: true,
monitors,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("getMaintenanceStatusPage", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Get Status Pages for Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
let statusPages = await R.getAll("SELECT status_page.id, status_page.title FROM maintenance_status_page msp JOIN status_page ON msp.status_page_id = status_page.id WHERE msp.maintenance_id = ? ", [
maintenanceID,
]);
callback({
ok: true,
statusPages,
});
} catch (e) {
console.error(e);
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("deleteMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Delete Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
if (maintenanceID in server.maintenanceList) {
delete server.maintenanceList[maintenanceID];
}
await R.exec("DELETE FROM maintenance WHERE id = ? AND user_id = ? ", [
maintenanceID,
socket.userID,
]);
apicache.clear();
callback({
ok: true,
msg: "Deleted Successfully.",
});
await server.sendMaintenanceList(socket);
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("pauseMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Pause Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
await R.exec("UPDATE maintenance SET active = 0 WHERE id = ? ", [
maintenanceID,
]);
apicache.clear();
callback({
ok: true,
msg: "Paused Successfully.",
});
await server.sendMaintenanceList(socket);
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
socket.on("resumeMaintenance", async (maintenanceID, callback) => {
try {
checkLogin(socket);
log.debug("maintenance", `Resume Maintenance: ${maintenanceID} User ID: ${socket.userID}`);
await R.exec("UPDATE maintenance SET active = 1 WHERE id = ? ", [
maintenanceID,
]);
apicache.clear();
callback({
ok: true,
msg: "Resume Successfully",
});
await server.sendMaintenanceList(socket);
} catch (e) {
callback({
ok: false,
msg: e.message,
});
}
});
};
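An illustrative client-side sketch for these handlers; the maintenance fields below are examples only (the accepted schema is defined by Maintenance.jsonToBean in ../model/maintenance), and the URL and monitor ID are assumptions.
    const { io } = require("socket.io-client");
    const socket = io("http://localhost:3001");   // assumed local Uptime Kuma instance; login omitted for brevity

    socket.emit("addMaintenance", {
        title: "Upgrade database server",          // example fields, not an exhaustive schema
        description: "Planned downtime",
    }, (res) => {
        if (res.ok) {
            // Attach monitor #1 to the newly created maintenance window
            socket.emit("addMonitorMaintenance", res.maintenanceID, [ { id: 1 } ], (r) => {
                console.log(r.msg);
            });
        }
    });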

@@ -0,0 +1,69 @@
const { checkLogin } = require("../util-server");
const { PluginsManager } = require("../plugins-manager");
const { log } = require("../../src/util.js");
/**
* Handlers for plugins
* @param {Socket} socket Socket.io instance
* @param {UptimeKumaServer} server
*/
module.exports.pluginsHandler = (socket, server) => {
const pluginManager = server.getPluginManager();
// Get Plugin List
socket.on("getPluginList", async (callback) => {
try {
checkLogin(socket);
log.debug("plugin", "PluginManager.disable: " + PluginsManager.disable);
if (PluginsManager.disable) {
throw new Error("Plugin Disabled: In order to enable plugin feature, you need to use the default data directory: ./data/");
}
let pluginList = await pluginManager.fetchPluginList();
callback({
ok: true,
pluginList,
});
} catch (error) {
log.warn("plugin", "Error: " + error.message);
callback({
ok: false,
msg: error.message,
});
}
});
socket.on("installPlugin", async (repoURL, name, callback) => {
try {
checkLogin(socket);
pluginManager.downloadPlugin(repoURL, name);
await pluginManager.loadPlugin(name);
callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
socket.on("uninstallPlugin", async (name, callback) => {
try {
checkLogin(socket);
await pluginManager.removePlugin(name);
callback({
ok: true,
});
} catch (error) {
callback({
ok: false,
msg: error.message,
});
}
});
};
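A brief client-side sketch for the plugin events above (socket.io-client; an authenticated socket and a local server URL are assumed).
    const { io } = require("socket.io-client");
    const socket = io("http://localhost:3001");   // assumed local Uptime Kuma instance; login omitted for brevity

    socket.emit("getPluginList", (res) => {
        if (res.ok) {
            console.log("Available plugins:", res.pluginList);
        } else {
            console.warn(res.msg);   // e.g. the "Plugin Disabled" message from the handler above
        }
    });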

@@ -0,0 +1,49 @@
const { log } = require("../src/util");
class UptimeCacheList {
/**
* list[monitorID][duration]
*/
static list = {};
/**
* Get the uptime for a specific period
* @param {number} monitorID
* @param {number} duration
* @return {number}
*/
static getUptime(monitorID, duration) {
if (UptimeCacheList.list[monitorID] && UptimeCacheList.list[monitorID][duration]) {
log.debug("UptimeCacheList", "getUptime: " + monitorID + " " + duration);
return UptimeCacheList.list[monitorID][duration];
} else {
return null;
}
}
/**
* Add uptime for specified monitor
* @param {number} monitorID
* @param {number} duration
* @param {number} uptime Uptime to add
*/
static addUptime(monitorID, duration, uptime) {
log.debug("UptimeCacheList", "addUptime: " + monitorID + " " + duration);
if (!UptimeCacheList.list[monitorID]) {
UptimeCacheList.list[monitorID] = {};
}
UptimeCacheList.list[monitorID][duration] = uptime;
}
/**
* Clear cache for specified monitor
* @param {number} monitorID
*/
static clearCache(monitorID) {
log.debug("UptimeCacheList", "clearCache: " + monitorID);
delete UptimeCacheList.list[monitorID];
}
}
module.exports = {
UptimeCacheList,
};
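A usage sketch for the cache above: store a computed uptime per monitor and duration, and invalidate it when new data arrives. The file path and the monitor ID are assumptions for illustration.
    const { UptimeCacheList } = require("./uptime-cache-list");   // assumed filename for the class above

    const monitorID = 42;      // hypothetical monitor ID
    const duration = 24;       // cache key, e.g. a 24-hour window

    let uptime = UptimeCacheList.getUptime(monitorID, duration);
    if (uptime === null) {
        uptime = 0.999;        // placeholder for the real uptime calculation
        UptimeCacheList.addUptime(monitorID, duration, uptime);
    }

    // When a new heartbeat is written, drop the cached values for that monitor
    UptimeCacheList.clearCache(monitorID);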

@@ -9,6 +9,9 @@ const Database = require("./database");
const util = require("util");
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const { Settings } = require("./settings");
const dayjs = require("dayjs");
const { PluginsManager } = require("./plugins-manager");
// DO NOT IMPORT HERE IF THE MODULE USES `UptimeKumaServer.getInstance()`
/**
* `module.exports` (alias: `server`) should be inside this class, in order to avoid circular dependency issue.
@@ -26,6 +29,13 @@ class UptimeKumaServer {
* @type {{}}
*/
monitorList = {};
/**
* Main maintenance list
* @type {{}}
*/
maintenanceList = {};
entryPage = "dashboard"; entryPage = "dashboard";
app = undefined; app = undefined;
httpServer = undefined; httpServer = undefined;
@ -37,6 +47,22 @@ class UptimeKumaServer {
*/ */
indexHTML = ""; indexHTML = "";
generateMaintenanceTimeslotsInterval = undefined;
/**
* Plugins Manager
* @type {PluginsManager}
*/
pluginsManager = null;
/**
*
* @type {{}}
*/
static monitorTypeList = {
};
static getInstance(args) {
if (UptimeKumaServer.instance == null) {
UptimeKumaServer.instance = new UptimeKumaServer(args);
@@ -72,11 +98,27 @@ class UptimeKumaServer {
}
}
CacheableDnsHttpAgent.registerGlobalAgent();
this.io = new Server(this.httpServer);
}
/** Initialise app after the database has been set up */
async initAfterDatabaseReady() {
await CacheableDnsHttpAgent.update();
process.env.TZ = await this.getTimezone();
dayjs.tz.setDefault(process.env.TZ);
log.debug("DEBUG", "Timezone: " + process.env.TZ);
log.debug("DEBUG", "Current Time: " + dayjs.tz().format());
await this.generateMaintenanceTimeslots();
this.generateMaintenanceTimeslotsInterval = setInterval(this.generateMaintenanceTimeslots, 60 * 1000);
}
/**
* Send list of monitors to client
* @param {Socket} socket
* @returns {Object} List of monitors
*/
async sendMonitorList(socket) {
let list = await this.getMonitorJSONList(socket.userID);
this.io.to(socket.userID).emit("monitorList", list);
@@ -104,6 +146,45 @@ class UptimeKumaServer {
return result;
}
/**
* Send maintenance list to client
* @param {Socket} socket Socket.io instance to send to
* @returns {Object}
*/
async sendMaintenanceList(socket) {
return await this.sendMaintenanceListByUserID(socket.userID);
}
/**
* Send list of maintenances to user
* @param {number} userID
* @returns {Object}
*/
async sendMaintenanceListByUserID(userID) {
let list = await this.getMaintenanceJSONList(userID);
this.io.to(userID).emit("maintenanceList", list);
return list;
}
/**
* Get a list of maintenances for the given user.
* @param {string} userID - The ID of the user to get maintenances for.
* @returns {Promise<Object>} A promise that resolves to an object with maintenance IDs as keys and maintenance objects as values.
*/
async getMaintenanceJSONList(userID) {
let result = {};
let maintenanceList = await R.find("maintenance", " user_id = ? ORDER BY end_date DESC, title", [
userID,
]);
for (let maintenance of maintenanceList) {
result[maintenance.id] = await maintenance.toJSON();
}
return result;
}
/**
* Write error to log file
* @param {any} error The error to write
@@ -130,6 +211,11 @@ class UptimeKumaServer {
errorLogStream.end();
}
/**
* Get the IP of the client connected to the socket
* @param {Socket} socket
* @returns {string}
*/
async getClientIP(socket) {
let clientIP = socket.client.conn.remoteAddress;
@@ -138,15 +224,115 @@ class UptimeKumaServer {
}
if (await Settings.get("trustProxy")) {
- return socket.client.conn.request.headers["x-forwarded-for"]
+ const forwardedFor = socket.client.conn.request.headers["x-forwarded-for"];
+ return (typeof forwardedFor === "string" ? forwardedFor.split(",")[0].trim() : null)
|| socket.client.conn.request.headers["x-real-ip"]
|| clientIP.replace(/^.*:/, "");
} else {
return clientIP.replace(/^.*:/, "");
}
}
/**
* Attempt to get the current server timezone
* If this fails, fall back to environment variables and then make a
* guess.
* @returns {string}
*/
async getTimezone() {
let timezone = await Settings.get("serverTimezone");
if (timezone) {
return timezone;
} else if (process.env.TZ) {
return process.env.TZ;
} else {
return dayjs.tz.guess();
}
}
/**
* Get the current offset
* @returns {string}
*/
getTimezoneOffset() {
return dayjs().format("Z");
}
/**
* Set the current server timezone and environment variables
* @param {string} timezone
*/
async setTimezone(timezone) {
await Settings.set("serverTimezone", timezone, "general");
process.env.TZ = timezone;
dayjs.tz.setDefault(timezone);
}
/** Load the timeslots for maintenance */
async generateMaintenanceTimeslots() {
let list = await R.find("maintenance_timeslot", " generated_next = 0 AND start_date <= DATETIME('now') ");
for (let maintenanceTimeslot of list) {
let maintenance = await maintenanceTimeslot.maintenance;
await MaintenanceTimeslot.generateTimeslot(maintenance, maintenanceTimeslot.end_date, false);
maintenanceTimeslot.generated_next = true;
await R.store(maintenanceTimeslot);
}
}
/** Stop the server */
async stop() {
clearTimeout(this.generateMaintenanceTimeslotsInterval);
}
loadPlugins() {
this.pluginsManager = new PluginsManager(this);
}
/**
*
* @returns {PluginsManager}
*/
getPluginManager() {
return this.pluginsManager;
}
/**
*
* @param {MonitorType} monitorType
*/
addMonitorType(monitorType) {
if (monitorType instanceof MonitorType && monitorType.name) {
if (monitorType.name in UptimeKumaServer.monitorTypeList) {
log.error("", "Conflict Monitor Type name");
}
UptimeKumaServer.monitorTypeList[monitorType.name] = monitorType;
} else {
log.error("", "Invalid Monitor Type: " + monitorType.name);
}
}
/**
*
* @param {MonitorType} monitorType
*/
removeMonitorType(monitorType) {
if (UptimeKumaServer.monitorTypeList[monitorType.name] === monitorType) {
delete UptimeKumaServer.monitorTypeList[monitorType.name];
} else {
log.error("", "Remove MonitorType failed: " + monitorType.name);
}
}
}
module.exports = {
UptimeKumaServer
};
// Must be at the end
const MaintenanceTimeslot = require("./model/maintenance_timeslot");
const { MonitorType } = require("./monitor-types/monitor-type");

@@ -1,5 +1,5 @@
const tcpp = require("tcp-ping");
- const Ping = require("./ping-lite");
+ const ping = require("@louislam/ping");
const { R } = require("redbean-node");
const { log, genSecret } = require("../src/util");
const passwordHash = require("./password-hash");
@@ -13,21 +13,22 @@ const { badgeConstants } = require("./config");
const mssql = require("mssql");
const { Client } = require("pg");
const postgresConParse = require("pg-connection-string").parse;
const mysql = require("mysql2");
const { MongoClient } = require("mongodb");
const { NtlmClient } = require("axios-ntlm");
const { Settings } = require("./settings");
const grpc = require("@grpc/grpc-js");
const protojs = require("protobufjs");
const radiusClient = require("node-radius-client");
const redis = require("redis");
const {
dictionaries: {
rfc2865: { file, attributes },
},
} = require("node-radius-utils");
const dayjs = require("dayjs");
- // From ping-lite
- exports.WIN = /^win/.test(process.platform);
- exports.LIN = /^linux/.test(process.platform);
- exports.MAC = /^darwin/.test(process.platform);
- exports.FBSD = /^freebsd/.test(process.platform);
- exports.BSD = /bsd$/.test(process.platform);
+ const isWindows = /^win/.test(process.platform);
/**
* Init or reset JWT secret
@@ -78,15 +79,16 @@ exports.tcping = function (hostname, port) {
/**
* Ping the specified machine
* @param {string} hostname Hostname / address of machine
* @param {number} [size=56] Size of packet to send
* @returns {Promise<number>} Time for ping in ms rounded to nearest integer
*/
- exports.ping = async (hostname) => {
+ exports.ping = async (hostname, size = 56) => {
try {
- return await exports.pingAsync(hostname);
+ return await exports.pingAsync(hostname, false, size);
} catch (e) {
// If the host cannot be resolved, try again with ipv6
if (e.message.includes("service not known")) {
- return await exports.pingAsync(hostname, true);
+ return await exports.pingAsync(hostname, true, size);
} else {
throw e;
}
@@ -97,22 +99,29 @@ exports.ping = async (hostname) => {
* Ping the specified machine
* @param {string} hostname Hostname / address of machine to ping
* @param {boolean} ipv6 Should IPv6 be used?
* @param {number} [size=56] Size of ping packet to send
* @returns {Promise<number>} Time for ping in ms rounded to nearest integer
*/
- exports.pingAsync = function (hostname, ipv6 = false) {
- return new Promise((resolve, reject) => {
- const ping = new Ping(hostname, {
- ipv6
- });
- ping.send(function (err, ms, stdout) {
- if (err) {
- reject(err);
- } else if (ms === null) {
- reject(new Error(stdout));
- } else {
- resolve(Math.round(ms));
- }
- });
- });
- };
+ exports.pingAsync = function (hostname, ipv6 = false, size = 56) {
+ return new Promise((resolve, reject) => {
+ ping.promise.probe(hostname, {
+ v6: ipv6,
+ min_reply: 1,
+ deadline: 10,
+ packetSize: size,
+ }).then((res) => {
+ // If ping failed, it will set field to unknown
+ if (res.alive) {
+ resolve(res.time);
+ } else {
+ if (isWindows) {
+ reject(new Error(exports.convertToUTF8(res.output)));
+ } else {
+ reject(new Error(res.output));
+ }
+ }
+ }).catch((err) => {
+ reject(err);
+ });
+ });
+ };
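A usage sketch for the updated helper, which now accepts a packet size; the hostname and the require path (relative to the server/ directory) are assumptions.
    const { ping } = require("./util-server");   // path assumed relative to server/

    (async () => {
        try {
            const ms = await ping("example.com", 64);   // 64-byte payload instead of the 56-byte default
            console.log(`Ping round-trip: ${ms} ms`);
        } catch (e) {
            console.error("Ping failed:", e.message);
        }
    })();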
@@ -131,7 +140,7 @@ exports.mqttAsync = function (hostname, topic, okMessage, options = {}) {
const { port, username, password, interval = 20 } = options;
// Adds MQTT protocol to the hostname if not already present
- if (!/^(?:http|mqtt)s?:\/\//.test(hostname)) {
+ if (!/^(?:http|mqtt|ws)s?:\/\//.test(hostname)) {
hostname = "mqtt://" + hostname;
}
@@ -141,10 +150,11 @@ exports.mqttAsync = function (hostname, topic, okMessage, options = {}) {
reject(new Error("Timeout"));
}, interval * 1000 * 0.8);
- log.debug("mqtt", "MQTT connecting");
+ const mqttUrl = `${hostname}:${port}`;
+ log.debug("mqtt", `MQTT connecting to ${mqttUrl}`);
- let client = mqtt.connect(hostname, {
- port,
+ let client = mqtt.connect(mqttUrl, {
username,
password
});
@@ -244,19 +254,19 @@ exports.dnsResolve = function (hostname, resolverServer, resolverPort, rrtype) {
* @param {string} query The query to validate the database with
* @returns {Promise<(string[]|Object[]|Object)>}
*/
- exports.mssqlQuery = function (connectionString, query) {
- return new Promise((resolve, reject) => {
- mssql.connect(connectionString).then(pool => {
- return pool.request()
- .query(query);
- }).then(result => {
- resolve(result);
- }).catch(err => {
- reject(err);
- }).finally(() => {
- mssql.close();
- });
- });
- };
+ exports.mssqlQuery = async function (connectionString, query) {
+ let pool;
+ try {
+ pool = new mssql.ConnectionPool(connectionString);
+ await pool.connect();
+ await pool.request().query(query);
+ pool.close();
+ } catch (e) {
+ if (pool) {
+ pool.close();
+ }
+ throw e;
+ }
+ };
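A usage sketch for the rewritten MSSQL helper, now async/await with an explicit ConnectionPool; the server, credentials and require path are placeholder assumptions.
    const { mssqlQuery } = require("./util-server");   // path assumed relative to server/

    mssqlQuery("Server=localhost,1433;Database=master;User Id=sa;Password=Secret!;Encrypt=false", "SELECT 1")
        .then(() => console.log("MSSQL reachable"))
        .catch((e) => console.error("MSSQL check failed:", e.message));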
/**
@@ -276,9 +286,36 @@ exports.postgresQuery = function (connectionString, query) {
const client = new Client({ connectionString });
- client.connect();
+ client.connect((err) => {
if (err) {
reject(err);
client.end();
} else {
// Connected here
client.query(query, (err, res) => {
if (err) {
reject(err);
} else {
resolve(res);
}
client.end();
});
}
});
});
};
- return client.query(query)
/**
* Run a query on MySQL/MariaDB
* @param {string} connectionString The database connection string
* @param {string} query The query to validate the database with
* @returns {Promise<(string[]|Object[]|Object)>}
*/
exports.mysqlQuery = function (connectionString, query) {
return new Promise((resolve, reject) => {
const connection = mysql.createConnection(connectionString);
connection.promise().query(query)
.then(res => {
resolve(res);
})
@@ -286,11 +323,39 @@ exports.postgresQuery = function (connectionString, query) {
reject(err);
})
.finally(() => {
- client.end();
+ connection.end();
});
});
};
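A usage sketch for the new MySQL/MariaDB helper; the connection URI (a format accepted by mysql2) and the require path are placeholder assumptions.
    const { mysqlQuery } = require("./util-server");   // path assumed relative to server/

    mysqlQuery("mysql://monitor:password@localhost:3306/appdb", "SELECT 1")
        .then(([ rows ]) => console.log("MySQL reachable, rows:", rows.length))   // mysql2 resolves with [rows, fields]
        .catch((e) => console.error("MySQL check failed:", e.message));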
/**
* Connect to and Ping a MongoDB database
* @param {string} connectionString The database connection string
* @returns {Promise<(string[]|Object[]|Object)>}
*/
exports.mongodbPing = async function (connectionString) {
let client = await MongoClient.connect(connectionString);
let dbPing = await client.db().command({ ping: 1 });
await client.close();
if (dbPing["ok"] === 1) {
return "UP";
} else {
throw Error("failed");
}
};
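A usage sketch for the MongoDB probe above, which resolves to "UP" or throws; the connection string and require path are placeholder assumptions.
    const { mongodbPing } = require("./util-server");   // path assumed relative to server/

    mongodbPing("mongodb://monitor:password@localhost:27017/admin")
        .then((status) => console.log("MongoDB status:", status))   // expected: "UP"
        .catch((e) => console.error("MongoDB check failed:", e.message));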
/**
* Query radius server
* @param {string} hostname Hostname of radius server
* @param {string} username Username to use
* @param {string} password Password to use
* @param {string} calledStationId ID of called station
* @param {string} callingStationId ID of calling station
* @param {string} secret Secret to use
* @param {number} [port=1812] Port to contact radius server on
* @returns {Promise<any>}
*/
exports.radius = function (
hostname,
username,
@@ -298,9 +363,11 @@ exports.radius = function (
calledStationId,
callingStationId,
secret,
port = 1812,
) {
const client = new radiusClient({
host: hostname,
hostPort: port,
dictionaries: [ file ],
});
@@ -315,6 +382,30 @@ exports.radius = function (
});
};
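A usage sketch with the new optional port parameter (defaulting to 1812); the hostname, credentials, station IDs and port below are placeholder assumptions.
    const { radius } = require("./util-server");   // path assumed relative to server/

    radius("radius.example.com", "monitor-user", "user-password",
        "00-00-00-00-00-00", "00-00-00-00-00-01", "shared-secret", 11812)
        .then(() => console.log("RADIUS Access-Accept"))
        .catch((e) => console.error("RADIUS check failed:", e.message));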
/**
* Redis server ping
* @param {string} dsn The redis connection string
*/
exports.redisPingAsync = function (dsn) {
return new Promise((resolve, reject) => {
const client = redis.createClient({
url: dsn,
});
client.on("error", (err) => {
reject(err);
});
client.connect().then(() => {
client.ping().then((res, err) => {
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
});
};
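A usage sketch for the Redis probe above; PING normally resolves with the string "PONG". The DSN and require path are placeholder assumptions.
    const { redisPingAsync } = require("./util-server");   // path assumed relative to server/

    redisPingAsync("redis://localhost:6379")
        .then((res) => console.log("Redis replied:", res))   // expected: "PONG"
        .catch((e) => console.error("Redis check failed:", e.message));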
/**
* Retrieve value of setting based on key
* @param {string} key Key of setting to retrieve
@@ -431,6 +522,10 @@ const parseCertificateInfo = function (info) {
* @returns {Object} Object containing certificate information
*/
exports.checkCertificate = function (res) {
if (!res.request.res.socket) {
throw new Error("No socket found");
}
const info = res.request.res.socket.getPeerCertificate(true);
const valid = res.request.res.socket.authorized || false;
@@ -557,7 +652,7 @@ exports.doubleCheckPassword = async (socket, currentPassword) => {
exports.startUnitTest = async () => {
console.log("Starting unit test...");
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
- const child = childProcess.spawn(npm, [ "run", "jest" ]);
+ const child = childProcess.spawn(npm, [ "run", "jest-backend" ]);
child.stdout.on("data", (data) => {
console.log(data.toString());
@@ -645,3 +740,121 @@ module.exports.send403 = (res, msg = "") => {
"msg": msg,
});
};
function timeObjectConvertTimezone(obj, timezone, timeObjectToUTC = true) {
let offsetString;
if (timezone) {
offsetString = dayjs().tz(timezone).format("Z");
} else {
offsetString = dayjs().format("Z");
}
let hours = parseInt(offsetString.substring(1, 3));
let minutes = parseInt(offsetString.substring(4, 6));
if (
(timeObjectToUTC && offsetString.startsWith("+")) ||
(!timeObjectToUTC && offsetString.startsWith("-"))
) {
hours *= -1;
minutes *= -1;
}
obj.hours += hours;
obj.minutes += minutes;
// Handle out of bound
if (obj.minutes < 0) {
obj.minutes += 60;
obj.hours--;
} else if (obj.minutes >= 60) {
obj.minutes -= 60;
obj.hours++;
}
if (obj.hours < 0) {
obj.hours += 24;
} else if (obj.hours >= 24) {
obj.hours -= 24;
}
return obj;
}
/**
* Convert a time object ({ hours, minutes }) from the given timezone to UTC
* @param {object} obj
* @param {string} timezone
* @returns {object}
*/
module.exports.timeObjectToUTC = (obj, timezone = undefined) => {
return timeObjectConvertTimezone(obj, timezone, true);
};
/**
* Convert a UTC time object ({ hours, minutes }) to local time in the given timezone
* @param {object} obj
* @param {string} timezone
* @returns {object}
*/
module.exports.timeObjectToLocal = (obj, timezone = undefined) => {
return timeObjectConvertTimezone(obj, timezone, false);
};
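A worked example of the conversion above, assuming a UTC+08:00 timezone and that the dayjs timezone plugin has already been registered by the server startup code: 01:30 local becomes 17:30 UTC (wrapping into the previous day), and converting back restores the original time.
    const { timeObjectToUTC, timeObjectToLocal } = require("./util-server");   // path assumed relative to server/

    const utc = timeObjectToUTC({ hours: 1, minutes: 30 }, "Asia/Shanghai");
    console.log(utc);      // { hours: 17, minutes: 30 }

    const local = timeObjectToLocal({ hours: 17, minutes: 30 }, "Asia/Shanghai");
    console.log(local);    // { hours: 1, minutes: 30 }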
/**
* Create gRPC client stub and run the configured unary call
* @param {Object} options Options for the gRPC call (URL, protobuf definition, service, method, body, TLS flag)
*/
module.exports.grpcQuery = async (options) => {
const { grpcUrl, grpcProtobufData, grpcServiceName, grpcEnableTls, grpcMethod, grpcBody } = options;
const protocObject = protojs.parse(grpcProtobufData);
const protoServiceObject = protocObject.root.lookupService(grpcServiceName);
const Client = grpc.makeGenericClientConstructor({});
const credentials = grpcEnableTls ? grpc.credentials.createSsl() : grpc.credentials.createInsecure();
const client = new Client(
grpcUrl,
credentials
);
const grpcService = protoServiceObject.create(function (method, requestData, cb) {
const fullServiceName = method.fullName;
const serviceFQDN = fullServiceName.split(".");
const serviceMethod = serviceFQDN.pop();
const serviceMethodClientImpl = `/${serviceFQDN.slice(1).join(".")}/${serviceMethod}`;
log.debug("monitor", `gRPC method ${serviceMethodClientImpl}`);
client.makeUnaryRequest(
serviceMethodClientImpl,
arg => arg,
arg => arg,
requestData,
cb);
}, false, false);
return new Promise((resolve, _) => {
try {
return grpcService[`${grpcMethod}`](JSON.parse(grpcBody), function (err, response) {
const responseData = JSON.stringify(response);
if (err) {
return resolve({
code: err.code,
errorMessage: err.details,
data: ""
});
} else {
log.debug("monitor:", `gRPC response: ${JSON.stringify(response)}`);
return resolve({
code: 1,
errorMessage: "",
data: responseData
});
}
});
} catch (err) {
return resolve({
code: -1,
errorMessage: `Error ${err}. Please review your gRPC configuration option. The service name must not include package name value, and the method name must follow camelCase format`,
data: ""
});
}
});
};
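An illustrative call to the helper above; the URL, proto definition and service/method names are placeholders, not part of this patch (note the method name is camelCase, as the error hint requires).
    const { grpcQuery } = require("./util-server");   // path assumed relative to server/

    const options = {
        grpcUrl: "localhost:50051",               // placeholder gRPC endpoint
        grpcEnableTls: false,
        grpcServiceName: "health.HealthService",
        grpcMethod: "check",
        grpcBody: JSON.stringify({ service: "uptime" }),
        grpcProtobufData: `
            syntax = "proto3";
            package health;
            message CheckRequest { string service = 1; }
            message CheckResponse { string status = 1; }
            service HealthService {
                rpc Check (CheckRequest) returns (CheckResponse);
            }
        `,
    };

    grpcQuery(options).then((res) => {
        // res always resolves with { code, errorMessage, data }
        console.log(res.code, res.errorMessage, res.data);
    });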

Some files were not shown because too many files have changed in this diff.