Merge branch 'master' into feature/680_add_labels_to_prometheus_metrics

Matthew Macdonald-Wallace 2 years ago committed by GitHub
commit 93633089f8

@ -28,6 +28,8 @@ SECURITY.md
tsconfig.json
.env
/tmp
/babel.config.js
/ecosystem.config.js
### .gitignore content (commented rules are duplicated)
@ -42,4 +44,6 @@ dist-ssr
#!/data/.gitkeep
#.vscode
### End of .gitignore content

@ -1,4 +1,9 @@
module.exports = {
ignorePatterns: [
"test/*",
"server/modules/apicache/*",
"src/util.js"
],
root: true,
env: {
browser: true,
@ -17,39 +22,48 @@ module.exports = {
requireConfigFile: false,
},
rules: {
"linebreak-style": ["error", "unix"],
"camelcase": ["warn", {
"yoda": "error",
eqeqeq: [ "warn", "smart" ],
"linebreak-style": [ "error", "unix" ],
"camelcase": [ "warn", {
"properties": "never",
"ignoreImports": true
}],
// override/add rules settings here, such as:
// 'vue/no-unused-vars': 'error'
"no-unused-vars": "warn",
"no-unused-vars": [ "warn", {
"args": "none"
}],
indent: [
"error",
4,
{
ignoredNodes: ["TemplateLiteral"],
ignoredNodes: [ "TemplateLiteral" ],
SwitchCase: 1,
},
],
quotes: ["warn", "double"],
semi: "warn",
"vue/html-indent": ["warn", 4], // default: 2
quotes: [ "error", "double" ],
semi: "error",
"vue/html-indent": [ "error", 4 ], // default: 2
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"no-multi-spaces": ["error", {
"vue/multi-word-component-names": "off",
"no-multi-spaces": [ "error", {
ignoreEOLComments: true,
}],
"space-before-function-paren": ["error", {
"array-bracket-spacing": [ "warn", "always", {
"singleValue": true,
"objectsInArrays": false,
"arraysInArrays": false
}],
"space-before-function-paren": [ "error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": ["error", "always"],
"object-curly-spacing": [ "error", "always" ],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
@ -59,37 +73,37 @@ module.exports = {
"keyword-spacing": "warn",
"space-infix-ops": "warn",
"arrow-spacing": "warn",
"no-trailing-spaces": "warn",
"no-constant-condition": ["error", {
"no-trailing-spaces": "error",
"no-constant-condition": [ "error", {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//'no-console': 'warn',
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": ["warn", {
"no-multiple-empty-lines": [ "warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": ["warn", "always", {
"lines-between-class-members": [ "warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error",
"array-bracket-newline": ["error", "consistent"],
"eol-last": ["error", "always"],
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//'prefer-template': 'error',
"comma-dangle": ["warn", "only-multiline"],
"no-empty": ["error", {
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": ["error", "never"],
"max-statements-per-line": ["error", { "max": 1 }]
"one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }]
},
"overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": ["error", "always-multiline"],
"comma-dangle": [ "error", "always-multiline" ],
}
},

@ -1,3 +1,5 @@
👉 Delete this line if you have read and agree to our pull request rules and guidelines: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
# Description
Fixes #(issue)
@ -20,6 +22,7 @@ Please delete any options that are not relevant.
- [ ] I ran ESLint and other linters for modified files
- [ ] I have performed a self-review of my own code and tested it
- [ ] I have commented my code, particularly in hard-to-understand areas
(including JSDoc for methods)
- [ ] My changes generate no new warnings
- [ ] My code needed automated testing. I have added them (this is optional task)

@ -11,25 +11,42 @@ on:
jobs:
auto-test:
needs: [ check-linters ]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
node-version: [14.x, 16.x, 17.x]
node: [ 14, 16, 17, 18 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
- uses: actions/checkout@v2
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
node-version: ${{ matrix.node }}
cache: 'npm'
- run: npm run install-legacy
- run: npm install
- run: npm run build
- run: npm test
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
check-linters:
runs-on: ubuntu-latest
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 14
cache: 'npm'
- run: npm install
- run: npm run lint

@ -1,22 +0,0 @@
name: 'Automatically close stale issues and PRs'
on:
schedule:
- cron: '0 0 * * *'
#Run once a day at midnight
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v4
with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
stale-pr-message: 'We are clearing up our old Pull Requests and yours has been open for 6 months with no activity. Remove stale label or comment or this will be closed in 7 days.'
close-issue-message: 'This issue was closed because it has been stalled for 7 days with no activity.'
close-pr-message: 'This PR was closed because it has been stalled for 7 days with no activity.'
days-before-stale: 180
days-before-close: 0
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,'
exempt-pr-labels: 'awaiting-approval,work-in-progress,enhancement,feature-request'
exempt-issue-assignees: 'louislam'
exempt-pr-assignees: 'louislam'

@ -0,0 +1 @@
legacy-peer-deps=true

@ -1,9 +1,14 @@
{
"extends": "stylelint-config-standard",
"customSyntax": "postcss-html",
"rules": {
"indentation": 4,
"no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null
"declaration-empty-line-before": null,
"alpha-value-notation": "number",
"color-function-notation": "legacy",
"shorthand-property-no-redundant-values": null,
"color-hex-length": null,
}
}

@ -1 +1 @@
git.kuma.pet
git.kuma.pet

@ -27,12 +27,34 @@ The frontend code build into "dist" directory. The server (express.js) exposes t
## Can I create a pull request for Uptime Kuma?
Generally, if the pull request is working fine, and it does not affect any existing logic, workflow or performance, I will merge it into the master branch once it is tested.
Yes, you can. However, since I don't want to waste your time, be sure to **create an empty draft pull request, so we can discuss first** if it is a large pull request or you don't know whether it will be merged or not.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure there are no breaking changes and stick to my vision of this project, especially for large pull requests.
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones) if I plan to review and merge it.
✅ Accept:
- Bug/Security fix
- Translations
- Adding notification providers
⚠️ Discussion First
- Large pull requests
- New features
❌ Won't Merge
- Does not pass the auto test
- Any breaking changes
- Duplicated pull request
- Buggy
- Existing logic is completely modified or deleted for no reason
- A function that is completely out of scope
If you are not sure whether I will accept your pull request, feel free to create an empty pull request draft first.
### Recommended Pull Request Guideline
Before diving into coding, discussing first is preferred. Creating an empty pull request for discussion is recommended.
1. Fork the project
1. Clone your fork repo to local
1. Create a new branch
@ -42,50 +64,7 @@ If you are not sure whether I will accept your pull request, feel free to create
1. Create a pull request: https://github.com/louislam/uptime-kuma/compare
1. Write a proper description
1. Click "Change to draft"
### Pull Request Examples
Here are some example situations in the past.
#### ✅ High - Medium Priority
Easy to review, no breaking changes, and does not touch the existing code
- Add a new notification
- Add a chart
- Fix a bug
- Translations
- Add an independent new feature
#### *️⃣ Requires one more reviewer
I do not have the knowledge to test these.
- Add k8s supports
#### ⚠ Low Priority - Harsh Mode
Some pull requests require modifying the core. To be honest, I do not want anyone to try to do that, because it would cost a lot of your time. I will review your pull request harshly. Also, you may need to write a lot of unit tests to ensure that there is no breaking change.
- Touch large parts of code of any very important features
- Touch monitoring logic
- Drop a table or drop a column for any reason
- Touch the entry point of Docker or Node.js
- Modify auth
#### *️⃣ Low Priority
These change my current workflow and require further study.
- Change my release approach
#### ❌ Won't Merge
- Any breaking changes
- Duplicated pull request
- Buggy
- Existing logic is completely modified or deleted
- A function that is completely out of scope
1. Discussion
## Project Styles
@ -93,27 +72,30 @@ I personally do not like something need to learn so much and need to config so m
- Easy to install for non-Docker users, no native build dependency is needed (at least for x86_64), no extra config, no extra effort to get it run
- Single container for Docker users, no overly complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Env var is not encouraged.
- Settings should be configurable in the frontend. Environment variables are not encouraged, unless they are related to startup such as `DATA_DIR`.
- Easy to use
- The web UI styling should be consistent and nice.
## Coding Styles
- 4 spaces indentation
- Follow `.editorconfig`
- Follow ESLint
- Methods and functions should be documented with JSDoc
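For example, a minimal JSDoc sketch in this style (the method and parameters below are made up for illustration, not taken from the codebase):

```js
/**
 * Send a test notification to the given user.
 * @param {number} userID ID of the user to notify
 * @param {string} message Message body to send
 * @returns {Promise<void>}
 */
async function sendTestNotification(userID, message) {
    // ... implementation omitted
}
```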
## Name convention
- Javascript/Typescript: camelCaseType
- SQLite: underscore_type
- CSS/SCSS: dash-type
- SQLite: snake_case (Underscore)
- CSS/SCSS: kebab-case (Dash)
## Tools
- Node.js >= 14
- NPM >= 8.5
- Git
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA)
- A SQLite tool (SQLite Expert Personal is suggested)
- A SQLite GUI tool (SQLite Expert Personal is suggested)
## Install dependencies
@ -121,39 +103,45 @@ I personally do not like something need to learn so much and need to config so m
npm ci
```
## How to start the Backend Dev Server
## Dev Server
(2022-04-26 Update)
(2021-09-23 Update)
We can start the frontend dev server and the backend dev server in one command.
Port `3000` and port `3001` will be used.
```bash
npm run start-server-dev
npm run dev
```
## Backend Server
It binds to `0.0.0.0:3001` by default.
### Backend Details
It is mainly a socket.io app + express.js.
express.js is just used for serving the frontend built files (index.html, .js and .css etc.)
express.js is used for:
- entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of status page
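As a rough sketch of that arrangement (an assumed minimal layout for illustration, not the actual `server.js`):

```js
// Minimal sketch only: an express app serving the built frontend, with socket.io attached.
// The paths, route and port here are illustrative assumptions.
const express = require("express");
const http = require("http");
const { Server } = require("socket.io");

const app = express();
app.use(express.static("./dist"));              // serve the built index.html, .js and .css
app.get("/status/:slug", (req, res) => {        // entry point, e.g. serving a status page
    res.sendFile("index.html", { root: "./dist" });
});

const httpServer = http.createServer(app);
const io = new Server(httpServer);              // most app logic is handled over socket.io
io.on("connection", (socket) => {
    socket.on("getMonitorList", (callback) => callback([]));
});

httpServer.listen(3001, "0.0.0.0");
```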
### Structure in /server/
- model/ (Object model, auto mapping to the database table name)
- modules/ (Modified 3rd-party modules)
- notification-providers/ (individual notification logic)
- routers/ (Express Routers)
- socket-handler (Socket.io Handlers)
- server.js (Server main logic)
## How to start the Frontend Dev Server
- server.js (Server entry point and main logic)
1. Set the env var `NODE_ENV` to "development".
2. Start the frontend dev server by the following command.
## Frontend Dev Server
```bash
npm run dev
```
It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only.
It binds to `0.0.0.0:3000` by default.
For production, it is not used; the frontend is compiled into the `dist` directory instead.
You can use Vue.js devtools Chrome extension for debugging.
@ -221,14 +209,13 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
### Release Procedures
1. Draft a release note
1. Make sure the repo is cleared
1. `npm run update-version 1.X.X`
1. `npm run build`
1. `npm run build-docker`
1. `git push`
1. Publish the release note as 1.X.X
1. `npm run upload-artifacts`
1. SSH to demo site server and update to 1.X.X
2. Make sure the repo is cleared
3. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue` prompt
5. `git push`
6. Publish the release note as 1.X.X
7. Press any key to continue
8. SSH to demo site server and update to 1.X.X
Checking:
@ -236,6 +223,15 @@ Checking:
- Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- Try clean installation with Node.js
### Release Beta Procedures
1. Draft a release note, check "This is a pre-release"
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue` prompt
5. Publish the release note as 1.X.X-beta.X
6. Press any key to continue
### Release Wiki
#### Setup Repo

@ -23,21 +23,23 @@ VPS is sponsored by Uptime Kuma sponsors on [Open Collective](https://opencollec
## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server.
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers.
* Fancy, Reactive, Fast UI/UX.
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [70+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications).
* 20 second intervals.
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/languages)
* Simple Status Page
* Multiple Status Pages
* Map Status Page to Domain
* Ping Chart
* Certificate Info
* Proxy Support
* 2FA available
## 🔧 How to Install
### 🐳 Docker
```bash
docker volume create uptime-kuma
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
```
@ -47,7 +49,10 @@ Browse to http://localhost:3001 after starting.
### 💪🏻 Non-Docker
Required Tools: Node.js >= 14, git and pm2.
Required Tools:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running in the background
```bash
# Update your npm to the latest version
@ -61,12 +66,26 @@ npm run setup
node server/server.js
# (Recommended) Option 2. Run in background using PM2
# Install PM2 if you don't have it: npm install pm2 -g
# Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate
# Start Server
pm2 start server/server.js --name uptime-kuma
```
```
Browse to http://localhost:3001 after starting.
More useful PM2 Commands
```bash
# If you want to see the current console output
pm2 monit
# If you want to add it to startup
pm2 save && pm2 startup
```
### Advanced Installation
If you need more options or need to browse via a reverse proxy, please read:
@ -93,7 +112,7 @@ https://github.com/louislam/uptime-kuma/projects/1
Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)
<img src="https://uptime.kuma.pet/sponsors?v=3" alt />
<img src="https://uptime.kuma.pet/sponsors?v=6" alt />
## 🖼 More Screenshots
@ -115,7 +134,7 @@ Telegram Notification Sample:
## Motivation
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and unmaintained.
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained.
* Want to build a fancy UI.
* Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5.
@ -132,16 +151,23 @@ You can discuss or ask for help in [issues](https://github.com/louislam/uptime-k
### Subreddit
My Reddit account: louislamlam
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam).
You can mention me if you ask a question on Reddit.
https://www.reddit.com/r/UptimeKuma/
[r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
## Contribute
If you want to report a bug or request a new feature. Free feel to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Beta Version
Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases
### Bug Reports / Feature Requests
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Translations
If you want to translate Uptime Kuma into your language, please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages
If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great.
English proofreading is needed too because my grammar is not that great, sadly. Feel free to correct my grammar in this README, source code, or wiki.
### Pull Requests
If you want to modify Uptime Kuma, this guideline may be useful for you: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

@ -8,15 +8,9 @@ Do not use the issue tracker or discuss it in the public as it will cause more d
## Supported Versions
Use this section to tell people about which versions of your project are
currently being supported with security updates.
### Uptime Kuma Versions
| Version | Supported |
| ------- | ------------------ |
| 1.9.X | :white_check_mark: |
| <= 1.8.X | ❌ |
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
### Upgradable Docker Tags
@ -24,8 +18,8 @@ currently being supported with security updates.
| ------- | ------------------ |
| 1 | :white_check_mark: |
| 1-debian | :white_check_mark: |
| 1-alpine | :white_check_mark: |
| latest | :white_check_mark: |
| debian | :white_check_mark: |
| alpine | :white_check_mark: |
| 1-alpine | ⚠️ Deprecated |
| alpine | ⚠️ Deprecated |
| All other tags | ❌ |

@ -1,11 +1,11 @@
const config = {};
if (process.env.TEST_FRONTEND) {
config.presets = ["@babel/preset-env"];
config.presets = [ "@babel/preset-env" ];
}
if (process.env.TEST_BACKEND) {
config.plugins = ["babel-plugin-rewire"];
config.plugins = [ "babel-plugin-rewire" ];
}
module.exports = config;

@ -1,24 +1,50 @@
import legacy from "@vitejs/plugin-legacy";
import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite";
import visualizer from "rollup-plugin-visualizer";
import viteCompression from "vite-plugin-compression";
const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss");
const viteCompressionFilter = /\.(js|mjs|json|css|html|svg)$/i;
// https://vitejs.dev/config/
export default defineConfig({
define: {
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
},
plugins: [
vue(),
legacy({
targets: ["ie > 11"],
additionalLegacyPolyfills: ["regenerator-runtime/runtime"]
})
targets: [ "since 2015" ],
}),
visualizer({
filename: "tmp/dist-stats.html"
}),
viteCompression({
algorithm: "gzip",
filter: viteCompressionFilter,
}),
viteCompression({
algorithm: "brotliCompress",
filter: viteCompressionFilter,
}),
],
css: {
postcss: {
"parser": postCssScss,
"map": false,
"plugins": [postcssRTLCSS]
"plugins": [ postcssRTLCSS ]
}
},
build: {
rollupOptions: {
output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {
}
}
},
}
});

@ -0,0 +1,5 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor_group
ADD send_url BOOLEAN DEFAULT 0 NOT NULL;
COMMIT;

@ -0,0 +1,18 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE docker_host (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
user_id INT NOT NULL,
docker_daemon VARCHAR(255),
docker_type VARCHAR(255),
name VARCHAR(255)
);
ALTER TABLE monitor
ADD docker_host INTEGER REFERENCES docker_host(id);
ALTER TABLE monitor
ADD docker_container VARCHAR(255);
COMMIT;

@ -0,0 +1,18 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD auth_method VARCHAR(250);
ALTER TABLE monitor
ADD auth_domain TEXT;
ALTER TABLE monitor
ADD auth_workstation TEXT;
COMMIT;
BEGIN TRANSACTION;
UPDATE monitor
SET auth_method = 'basic'
WHERE basic_auth_user is not null;
COMMIT;

@ -0,0 +1,10 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD database_connection_string VARCHAR(2000);
ALTER TABLE monitor
ADD database_query TEXT;
COMMIT;

@ -0,0 +1,16 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD mqtt_topic TEXT;
ALTER TABLE monitor
ADD mqtt_success_message VARCHAR(255);
ALTER TABLE monitor
ADD mqtt_username VARCHAR(255);
ALTER TABLE monitor
ADD mqtt_password VARCHAR(255);
COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD expiry_notification BOOLEAN default 1;
COMMIT;

@ -0,0 +1,23 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE proxy (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
user_id INT NOT NULL,
protocol VARCHAR(10) NOT NULL,
host VARCHAR(255) NOT NULL,
port SMALLINT NOT NULL,
auth BOOLEAN NOT NULL,
username VARCHAR(255) NULL,
password VARCHAR(255) NULL,
active BOOLEAN NOT NULL DEFAULT 1,
'default' BOOLEAN NOT NULL DEFAULT 0,
created_date DATETIME DEFAULT (DATETIME('now')) NOT NULL
);
ALTER TABLE monitor ADD COLUMN proxy_id INTEGER REFERENCES proxy(id);
CREATE INDEX proxy_id ON monitor (proxy_id);
CREATE INDEX proxy_user_id ON proxy (user_id);
COMMIT;

@ -0,0 +1,6 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page ADD footer_text TEXT;
ALTER TABLE status_page ADD custom_css TEXT;
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;

@ -0,0 +1,31 @@
-- You should not modify this if it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [status_page](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[slug] VARCHAR(255) NOT NULL UNIQUE,
[title] VARCHAR(255) NOT NULL,
[description] TEXT,
[icon] VARCHAR(255) NOT NULL,
[theme] VARCHAR(30) NOT NULL,
[published] BOOLEAN NOT NULL DEFAULT 1,
[search_engine_index] BOOLEAN NOT NULL DEFAULT 1,
[show_tags] BOOLEAN NOT NULL DEFAULT 0,
[password] VARCHAR,
[created_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
[modified_date] DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE UNIQUE INDEX [slug] ON [status_page]([slug]);
CREATE TABLE [status_page_cname](
[id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[status_page_id] INTEGER NOT NULL REFERENCES [status_page]([id]) ON DELETE CASCADE ON UPDATE CASCADE,
[domain] VARCHAR NOT NULL UNIQUE
);
ALTER TABLE incident ADD status_page_id INTEGER;
ALTER TABLE [group] ADD status_page_id INTEGER;
COMMIT;

@ -1,8 +1,8 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:14-alpine3.12
FROM node:16-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib && \
pip3 --no-cache-dir install apprise==0.9.6 && \
pip3 --no-cache-dir install apprise==0.9.9 && \
rm -rf /root/.cache

@ -1,12 +1,28 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too
FROM node:14-buster-slim
FROM node:16-buster-slim
ARG TARGETPLATFORM
WORKDIR /app
# Install Curl
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine!
RUN apt update && \
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \
sqlite3 iputils-ping util-linux dumb-init && \
pip3 --no-cache-dir install apprise==0.9.6 && \
rm -rf /var/lib/apt/lists/*
pip3 --no-cache-dir install apprise==0.9.9 && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove
# Install cloudflared
# dpkg --add-architecture arm: cloudflared does not provide armhf, this is a workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
dpkg --add-architecture arm && \
apt update && \
apt --yes --no-install-recommends install ./cloudflared.deb && \
rm -rf /var/lib/apt/lists/* && \
rm -f cloudflared.deb && \
apt --yes autoremove

@ -1,13 +1,14 @@
# Simple docker-composer.yml
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
services:
uptime-kuma:
image: louislam/uptime-kuma
image: louislam/uptime-kuma:1
container_name: uptime-kuma
volumes:
- ./uptime-kuma:/app/data
- ./uptime-kuma-data:/app/data
ports:
- 3001:3001
- 3001:3001 # <Host Port>:<Container Port>
restart: always

@ -1,6 +1,6 @@
module.exports = {
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
}
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
};

@ -0,0 +1,66 @@
const pkg = require("../../package.json");
const fs = require("fs");
const childProcess = require("child_process");
const util = require("../../src/util");
util.polyfill();
const version = process.env.VERSION;
console.log("Beta Version: " + version);
if (!version || !version.includes("-beta.")) {
console.error("invalid version, beta version only");
process.exit(1);
}
const exists = tagExists(version);
if (! exists) {
// Process package.json
pkg.version = version;
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
childProcess.spawnSync("npm", [ "install" ]);
commit(version);
tag(version);
} else {
console.log("version tag exists, please delete the tag or use another tag");
process.exit(1);
}
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
if (stdout.includes("no changes added to commit")) {
throw new Error("commit error");
}
res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
console.log(res.stdout.toString().trim());
}
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
res = childProcess.spawnSync("git", [ "push", "origin", version ]);
console.log(res.stdout.toString().trim());
}
function tagExists(version) {
if (! version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version;
}

@ -29,7 +29,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
labels: ["invalid-format"]
labels: [ "invalid-format" ]
});
// Add the issue closing comment

@ -0,0 +1,44 @@
// Download the cloudflared .deb for the Docker platform passed as argv[2] (e.g. linux/amd64).
const http = require("https"); // the https module, since the release URL is https://
const fs = require("fs");
const platform = process.argv[2];
if (!platform) {
console.error("No platform??");
process.exit(1);
}
let arch = null;
if (platform === "linux/amd64") {
arch = "amd64";
} else if (platform === "linux/arm64") {
arch = "arm64";
} else if (platform === "linux/arm/v7") {
arch = "arm";
} else {
console.error("Invalid platform?? " + platform);
process.exit(1);
}
const file = fs.createWriteStream("cloudflared.deb");
get("https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-" + arch + ".deb");
function get(url) {
http.get(url, function (res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
console.log("Redirect to " + res.headers.location);
get(res.headers.location);
} else if (res.statusCode >= 200 && res.statusCode < 300) {
res.pipe(file);
res.on("end", function () {
console.log("Downloaded");
});
} else {
console.error(res.statusCode);
process.exit(1);
}
});
}

@ -4,6 +4,7 @@ const tar = require("tar");
const packageJSON = require("../package.json");
const fs = require("fs");
const rmSync = require("./fs-rmSync.js");
const version = packageJSON.version;
const filename = "dist.tar.gz";
@ -11,6 +12,12 @@ const filename = "dist.tar.gz";
const url = `https://github.com/louislam/uptime-kuma/releases/download/${version}/${filename}`;
download(url);
/**
* Downloads the latest version of the dist from a GitHub release.
* @param {string} url The URL to download from.
*
* Generated by Trelent
*/
function download(url) {
console.log(url);
@ -21,7 +28,7 @@ function download(url) {
if (fs.existsSync("./dist")) {
if (fs.existsSync("./dist-backup")) {
fs.rmdirSync("./dist-backup", {
rmSync("./dist-backup", {
recursive: true
});
}
@ -35,7 +42,7 @@ function download(url) {
tarStream.on("close", () => {
if (fs.existsSync("./dist-backup")) {
fs.rmdirSync("./dist-backup", {
rmSync("./dist-backup", {
recursive: true
});
}

@ -0,0 +1,19 @@
#!/usr/bin/env node
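// Substitute environment variables referenced as $NAME inside the arguments, then spawn the command.
// Hypothetical example: `VERSION=1.17.1 node extra/env2arg.js docker build -t foo:$VERSION .` runs `docker build -t foo:1.17.1 .`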
const childProcess = require("child_process");
let env = process.env;
let cmd = process.argv[2];
let args = process.argv.slice(3);
let replacedArgs = [];
for (let arg of args) {
for (let key in env) {
arg = arg.replaceAll(`$${key}`, env[key]);
}
replacedArgs.push(arg);
}
let child = childProcess.spawn(cmd, replacedArgs);
child.stdout.pipe(process.stdout);
child.stderr.pipe(process.stderr);

@ -0,0 +1,23 @@
const fs = require("fs");
/**
* Detect if `fs.rmSync` is available,
* to avoid the runtime deprecation warning triggered by using `fs.rmdirSync` with `{ recursive: true }` in Node.js v16,
* or the `recursive` option being removed completely in a future Node.js version.
* See the links below.
*
* @todo Once we drop support for Node.js v14 (or at least versions before v14.14.0), we can safely replace this function with `fs.rmSync`, since `fs.rmSync` was added in Node.js v14.14.0; currently we support all Node.js v14 versions, including those before v14.14.0, and this function has almost the same signature as `fs.rmSync`.
* @link https://nodejs.org/docs/latest-v16.x/api/deprecations.html#dep0147-fsrmdirpath--recursive-true- the deprecation information of `fs.rmdirSync`
* @link https://nodejs.org/docs/latest-v16.x/api/fs.html#fsrmsyncpath-options the documentation of `fs.rmSync`
* @param {fs.PathLike} path Valid types for path values in "fs".
* @param {fs.RmDirOptions} [options] Options for `fs.rmdirSync`; if `fs.rmSync` is available and `recursive` is true, `force: true` is added automatically.
*/
const rmSync = (path, options) => {
if (typeof fs.rmSync === "function") {
if (options.recursive) {
options.force = true;
}
return fs.rmSync(path, options);
}
return fs.rmdirSync(path, options);
};
module.exports = rmSync;

@ -189,7 +189,7 @@ if (type == "local") {
bash("check=$(pm2 --version)");
if (check == "") {
println("Installing PM2");
bash("npm install pm2 -g");
bash("npm install pm2 -g && pm2 install pm2-logrotate");
bash("pm2 startup");
}

@ -4,21 +4,21 @@ const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version
const newVersion = oldVersion + "-nightly"
const oldVersion = pkg.version;
const newVersion = oldVersion + "-nightly";
console.log("Old Version: " + oldVersion)
console.log("New Version: " + newVersion)
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (newVersion) {
// Process package.json
pkg.version = newVersion
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion)
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion)
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n")
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Process README.md
if (fs.existsSync("README.md")) {
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion))
fs.writeFileSync("README.md", fs.readFileSync("README.md", "utf8").replaceAll(oldVersion, newVersion));
}
}

@ -0,0 +1,6 @@
console.log("Git Push and Publish the release note on github, then press any key to continue");
process.stdin.setRawMode(true);
process.stdin.resume();
process.stdin.on("data", process.exit.bind(process, 0));

@ -1,11 +1,10 @@
console.log("== Uptime Kuma Reset Password Tool ==");
console.log("Loading the database");
const Database = require("../server/database");
const { R } = require("redbean-node");
const readline = require("readline");
const { initJWTSecret } = require("../server/util-server");
const User = require("../server/model/user");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
@ -13,8 +12,9 @@ const rl = readline.createInterface({
});
const main = async () => {
console.log("Connecting the database");
Database.init(args);
await Database.connect();
await Database.connect(false, false, true);
try {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
@ -31,7 +31,7 @@ const main = async () => {
let confirmPassword = await question("Confirm New Password: ");
if (password === confirmPassword) {
await user.resetPassword(password);
await User.resetPassword(user.id, password);
// Reset all sessions by reset jwt secret
await initJWTSecret();

@ -26,7 +26,7 @@ server.on("request", (request, send, rinfo) => {
ttl: 300,
address: "1.2.3.4"
});
} if (question.type === Packet.TYPE.AAAA) {
} else if (question.type === Packet.TYPE.AAAA) {
response.answers.push({
name: question.name,
type: question.type,

@ -0,0 +1,50 @@
const { log } = require("../src/util");
const mqttUsername = "louis1";
const mqttPassword = "!@#$LLam";
class SimpleMqttServer {
aedes = require("aedes")();
server = require("net").createServer(this.aedes.handle);
constructor(port) {
this.port = port;
}
start() {
this.server.listen(this.port, () => {
console.log("server started and listening on port ", this.port);
});
}
}
let server1 = new SimpleMqttServer(10000);
server1.aedes.authenticate = function (client, username, password, callback) {
if (username && password) {
console.log(password.toString("utf-8"));
callback(null, username === mqttUsername && password.toString("utf-8") === mqttPassword);
} else {
callback(null, false);
}
};
server1.aedes.on("subscribe", (subscriptions, client) => {
console.log(subscriptions);
for (let s of subscriptions) {
if (s.topic === "test") {
server1.aedes.publish({
topic: "test",
payload: Buffer.from("ok"),
}, (error) => {
if (error) {
log.error("mqtt_server", error);
}
});
}
}
});
server1.start();

@ -3,6 +3,7 @@
import fs from "fs";
import path from "path";
import util from "util";
import rmSync from "../fs-rmSync.js";
// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
@ -30,7 +31,7 @@ console.log("Arguments:", process.argv);
const baseLangCode = process.argv[2] || "en";
console.log("Base Lang: " + baseLangCode);
if (fs.existsSync("./languages")) {
fs.rmdirSync("./languages", { recursive: true });
rmSync("./languages", { recursive: true });
}
copyRecursiveSync("../../src/languages", "./languages");
@ -40,7 +41,7 @@ const files = fs.readdirSync("./languages");
console.log("Files:", files);
for (const file of files) {
if (!file.endsWith(".js")) {
if (! file.endsWith(".js")) {
console.log("Skipping " + file);
continue;
}
@ -82,5 +83,5 @@ for (const file of files) {
fs.writeFileSync(`../../src/languages/${file}`, code);
}
fs.rmdirSync("./languages", { recursive: true });
rmSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");

@ -1,14 +1,12 @@
const pkg = require("../package.json");
const fs = require("fs");
const child_process = require("child_process");
const childProcess = require("child_process");
const util = require("../src/util");
util.polyfill();
const oldVersion = pkg.version;
const newVersion = process.argv[2];
const newVersion = process.env.VERSION;
console.log("Old Version: " + oldVersion);
console.log("New Version: " + newVersion);
if (! newVersion) {
@ -22,25 +20,31 @@ if (! exists) {
// Process package.json
pkg.version = newVersion;
pkg.scripts.setup = pkg.scripts.setup.replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker"] = pkg.scripts["build-docker"].replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker-alpine"] = pkg.scripts["build-docker-alpine"].replaceAll(oldVersion, newVersion);
pkg.scripts["build-docker-debian"] = pkg.scripts["build-docker-debian"].replaceAll(oldVersion, newVersion);
// Replace the version: https://regex101.com/r/hmj2Bc/1
pkg.scripts.setup = pkg.scripts.setup.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
childProcess.spawnSync("npm", [ "install" ]);
commit(newVersion);
tag(newVersion);
updateWiki(oldVersion, newVersion);
} else {
console.log("version exists");
}
/**
* Updates the version number in package.json and commits it to git.
* @param {string} version - The new version number
*
* Generated by Trelent
*/
function commit(version) {
let msg = "update to " + version;
let msg = "Update to " + version;
let res = child_process.spawnSync("git", ["commit", "-m", msg, "-a"]);
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
@ -50,51 +54,22 @@ function commit(version) {
}
function tag(version) {
let res = child_process.spawnSync("git", ["tag", version]);
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
}
/**
* Checks if a given version is already tagged in the git repository.
* @param {string} version - The version to check for.
*
* Generated by Trelent
*/
function tagExists(version) {
if (! version) {
throw new Error("invalid version");
}
let res = child_process.spawnSync("git", ["tag", "-l", version]);
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version;
}
function updateWiki(oldVersion, newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
safeDelete(wikiDir);
child_process.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
let content = fs.readFileSync(howToUpdateFilename).toString();
content = content.replaceAll(`git checkout ${oldVersion}`, `git checkout ${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
child_process.spawnSync("git", ["add", "-A"], {
cwd: wikiDir,
});
child_process.spawnSync("git", ["commit", "-m", `Update to ${newVersion} from ${oldVersion}`], {
cwd: wikiDir,
});
console.log("Pushing to Github");
child_process.spawnSync("git", ["push"], {
cwd: wikiDir,
});
safeDelete(wikiDir);
}
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rmdirSync(dir, {
recursive: true,
});
}
}

@ -0,0 +1,48 @@
const childProcess = require("child_process");
const fs = require("fs");
const newVersion = process.env.VERSION;
if (!newVersion) {
console.log("Missing version");
process.exit(1);
}
updateWiki(newVersion);
function updateWiki(newVersion) {
const wikiDir = "./tmp/wiki";
const howToUpdateFilename = "./tmp/wiki/🆙-How-to-Update.md";
safeDelete(wikiDir);
childProcess.spawnSync("git", [ "clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir ]);
let content = fs.readFileSync(howToUpdateFilename).toString();
// Replace the version: https://regex101.com/r/hmj2Bc/1
content = content.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
childProcess.spawnSync("git", [ "add", "-A" ], {
cwd: wikiDir,
});
childProcess.spawnSync("git", [ "commit", "-m", `Update to ${newVersion}` ], {
cwd: wikiDir,
});
console.log("Pushing to Github");
childProcess.spawnSync("git", [ "push" ], {
cwd: wikiDir,
});
safeDelete(wikiDir);
}
function safeDelete(dir) {
if (fs.existsSync(dir)) {
fs.rmSync(dir, {
recursive: true,
});
}
}

@ -159,7 +159,7 @@ fi
check=$(pm2 --version)
if [ "$check" == "" ]; then
"echo" "-e" "Installing PM2"
npm install pm2 -g
npm install pm2 -g && pm2 install pm2-logrotate
pm2 startup
fi
mkdir -p $installPath

package-lock.json (generated, 15331 lines changed): file diff suppressed because it is too large.

@ -1,6 +1,6 @@
{
"name": "uptime-kuma",
"version": "1.11.4",
"version": "1.17.1",
"license": "MIT",
"repository": {
"type": "git",
@ -10,12 +10,15 @@
"node": "14.* || >=16.*"
},
"scripts": {
"install-legacy": "npm install --legacy-peer-deps",
"update-legacy": "npm update --legacy-peer-deps",
"install-legacy": "npm install",
"update-legacy": "npm update",
"lint:js": "eslint --ext \".js,.vue\" --ignore-path .gitignore .",
"lint-fix:js": "eslint --ext \".js,.vue\" --fix --ignore-path .gitignore .",
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"dev": "vite --host --config ./config/vite.config.js",
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
"start": "npm run start-server",
"start-server": "node server/server.js",
"start-server-dev": "cross-env NODE_ENV=development node server/server.js",
@ -30,15 +33,14 @@
"build-docker": "npm run build && npm run build-docker-debian && npm run build-docker-alpine",
"build-docker-alpine-base": "docker buildx build -f docker/alpine-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-alpine . --push",
"build-docker-debian-base": "docker buildx build -f docker/debian-base.dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:base-debian . --push",
"build-docker-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:1.11.4-alpine --target release . --push",
"build-docker-debian": "docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:1.11.4 -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:1.11.4-debian --target release . --push",
"build-docker-alpine": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:alpine -t louislam/uptime-kuma:1-alpine -t louislam/uptime-kuma:$VERSION-alpine --target release . --push",
"build-docker-debian": "node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma -t louislam/uptime-kuma:1 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:debian -t louislam/uptime-kuma:1-debian -t louislam/uptime-kuma:$VERSION-debian --target release . --push",
"build-docker-nightly": "npm run build && docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly --target nightly . --push",
"build-docker-nightly-alpine": "docker buildx build -f docker/dockerfile-alpine --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:nightly-alpine --target nightly . --push",
"build-docker-nightly-amd64": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:nightly-amd64 --target nightly . --push --progress plain",
"upload-artifacts": "docker buildx build -f docker/dockerfile --platform linux/amd64 -t louislam/uptime-kuma:upload-artifact --build-arg VERSION --build-arg GITHUB_TOKEN --target upload-artifact . --progress plain",
"setup": "git checkout 1.11.4 && npm ci --production && npm run download-dist",
"setup": "git checkout 1.17.1 && npm ci --production && npm run download-dist",
"download-dist": "node extra/download-dist.js",
"update-version": "node extra/update-version.js",
"mark-as-nightly": "node extra/mark-as-nightly.js",
"reset-password": "node extra/reset-password.js",
"remove-2fa": "node extra/remove-2fa.js",
@ -49,86 +51,115 @@
"test-install-script-ubuntu1604": "npm run compile-install-script && docker build --progress plain -f test/test_install_script/ubuntu1604.dockerfile .",
"test-nodejs16": "docker build --progress plain -f test/ubuntu-nodejs16.dockerfile .",
"simple-dns-server": "node extra/simple-dns-server.js",
"simple-mqtt-server": "node extra/simple-mqtt-server.js",
"update-language-files-with-base-lang": "cd extra/update-language-files && node index.js %npm_config_base_lang% && eslint ../../src/languages/**.js --fix",
"update-language-files": "cd extra/update-language-files && node index.js && eslint ../../src/languages/**.js --fix",
"ncu-patch": "ncu -u -t patch"
"ncu-patch": "npm-check-updates -u -t patch",
"release-final": "node extra/update-version.js && npm run build-docker && node ./extra/press-any-key.js && npm run upload-artifacts && node ./extra/update-wiki-version.js",
"release-beta": "node extra/beta/update-version.js && npm run build && node ./extra/env2arg.js docker buildx build -f docker/dockerfile --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:$VERSION -t louislam/uptime-kuma:beta . --target release --push && node ./extra/press-any-key.js && npm run upload-artifacts",
"git-remove-tag": "git tag -d",
"build-dist-and-restart": "npm run build && npm run start-server-dev"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
"@fortawesome/free-solid-svg-icons": "~5.15.4",
"@fortawesome/vue-fontawesome": "~3.0.0-5",
"@louislam/sqlite3": "~6.0.1",
"@popperjs/core": "~2.10.2",
"@louislam/sqlite3": "~15.0.6",
"args-parser": "~1.3.0",
"axios": "~0.21.4",
"axios": "~0.26.1",
"axios-ntlm": "^1.3.0",
"badge-maker": "^3.3.1",
"bcryptjs": "~2.4.3",
"bootstrap": "5.1.3",
"bree": "~7.1.0",
"bree": "~7.1.5",
"cacheable-lookup": "~6.0.4",
"chardet": "^1.3.0",
"chart.js": "~3.6.0",
"chartjs-adapter-dayjs": "~1.0.0",
"check-password-strength": "^2.0.3",
"check-password-strength": "^2.0.5",
"cheerio": "^1.0.0-rc.10",
"chroma-js": "^2.1.2",
"command-exists": "~1.2.9",
"compare-versions": "~3.6.0",
"dayjs": "~1.10.7",
"express": "~4.17.1",
"express-basic-auth": "~1.2.0",
"compression": "^1.7.4",
"dayjs": "^1.11.0",
"express": "~4.17.3",
"express-basic-auth": "~1.2.1",
"express-static-gzip": "^2.1.7",
"form-data": "~4.0.0",
"http-graceful-shutdown": "~3.1.5",
"http-graceful-shutdown": "~3.1.7",
"http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
"iconv-lite": "^0.6.3",
"jsonwebtoken": "~8.5.1",
"jwt-decode": "^3.1.2",
"limiter": "^2.1.0",
"mqtt": "^4.2.8",
"mssql": "^8.1.0",
"node-cloudflared-tunnel": "~1.0.9",
"nodemailer": "~6.6.5",
"notp": "~2.0.3",
"password-hash": "~1.2.2",
"postcss-rtlcss": "~3.4.1",
"postcss-scss": "~4.0.2",
"pg": "^8.7.3",
"pg-connection-string": "^2.5.0",
"prom-client": "~13.2.0",
"prometheus-api-metrics": "~3.2.0",
"qrcode": "~1.5.0",
"redbean-node": "0.1.3",
"prometheus-api-metrics": "~3.2.1",
"redbean-node": "0.1.4",
"socket.io": "~4.4.1",
"socket.io-client": "~4.4.1",
"socks-proxy-agent": "6.1.1",
"tar": "^6.1.11",
"tcp-ping": "~0.1.1",
"thirty-two": "~1.0.2",
"timezones-list": "~3.0.1",
"v-pagination-3": "~0.1.7",
"vue": "next",
"vue-chart-3": "3.0.9",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.1.9",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-qrcode": "~1.0.0",
"vue-router": "~4.0.12",
"vue-toastification": "~2.0.0-rc.5",
"vuedraggable": "~4.1.0"
"thirty-two": "~1.0.2"
},
"devDependencies": {
"@actions/github": "~5.0.0",
"@babel/eslint-parser": "~7.15.8",
"@actions/github": "~5.0.1",
"@babel/eslint-parser": "~7.17.0",
"@babel/preset-env": "^7.15.8",
"@types/bootstrap": "~5.1.6",
"@vitejs/plugin-legacy": "~1.6.3",
"@vitejs/plugin-vue": "~1.9.4",
"@vue/compiler-sfc": "~3.2.22",
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
"@fortawesome/free-solid-svg-icons": "~5.15.4",
"@fortawesome/vue-fontawesome": "~3.0.0-5",
"@popperjs/core": "~2.10.2",
"@types/bootstrap": "~5.1.9",
"@vitejs/plugin-legacy": "~1.8.2",
"@vitejs/plugin-vue": "~2.3.3",
"@vue/compiler-sfc": "~3.2.36",
"aedes": "^0.46.3",
"babel-plugin-rewire": "~1.2.0",
"bootstrap": "5.1.3",
"chart.js": "~3.6.2",
"chartjs-adapter-dayjs": "~1.0.0",
"concurrently": "^7.1.0",
"core-js": "~3.18.3",
"cross-env": "~7.0.3",
"dns2": "~2.0.1",
"eslint": "~7.32.0",
"eslint-plugin-vue": "~7.18.0",
"eslint": "~8.14.0",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "^0.3.10",
"jest": "~27.2.5",
"jest-puppeteer": "~6.0.0",
"jest-puppeteer": "~6.0.3",
"postcss-html": "^1.3.1",
"postcss-rtlcss": "~3.4.1",
"postcss-scss": "~4.0.3",
"prismjs": "^1.27.0",
"puppeteer": "~13.1.3",
"qrcode": "~1.5.0",
"rollup-plugin-visualizer": "^5.6.0",
"sass": "~1.42.1",
"stylelint": "~14.2.0",
"stylelint-config-standard": "~24.0.0",
"stylelint": "~14.7.1",
"stylelint-config-standard": "~25.0.0",
"timezones-list": "~3.0.1",
"typescript": "~4.4.4",
"vite": "~2.6.14"
"v-pagination-3": "~0.1.7",
"vite": "~2.9.9",
"vite-plugin-compression": "^0.5.1",
"vue": "next",
"vue-chart-3": "3.0.9",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~9.1.9",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-prism-editor": "^2.0.0-alpha.2",
"vue-qrcode": "~1.0.0",
"vue-router": "~4.0.14",
"vue-toastification": "~2.0.0-rc.5",
"vuedraggable": "~4.1.0",
"wait-on": "^6.0.1"
}
}

File diff suppressed because one or more lines are too long

(Image replaced — before: 6.4 KiB, after: 893 B)

@ -1,8 +1,12 @@
const { checkLogin } = require("./util-server");
const { R } = require("redbean-node");
class TwoFA {
/**
* Disable 2FA for specified user
* @param {number} userID ID of user to disable
* @returns {Promise<void>}
*/
static async disable2FA(userID) {
return await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [
userID,

@ -2,16 +2,19 @@ const basicAuth = require("express-basic-auth");
const passwordHash = require("./password-hash");
const { R } = require("redbean-node");
const { setting } = require("./util-server");
const { debug } = require("../src/util");
const { loginRateLimiter } = require("./rate-limiter");
/**
*
* @param username : string
* @param password : string
* @returns {Promise<Bean|null>}
* Login to web app
* @param {string} username
* @param {string} password
* @returns {Promise<(Bean|null)>}
*/
exports.login = async function (username, password) {
if (typeof username !== "string" || typeof password !== "string") {
return null;
}
let user = await R.findOne("user", " username = ? AND active = 1 ", [
username,
]);
@ -30,32 +33,48 @@ exports.login = async function (username, password) {
return null;
};
/**
* Callback for myAuthorizer
* @callback myAuthorizerCB
* @param {any} err Any error encountered
* @param {boolean} authorized Is the client authorized?
*/
/**
* Custom authorizer for express-basic-auth
* @param {string} username
* @param {string} password
* @param {myAuthorizerCB} callback
*/
function myAuthorizer(username, password, callback) {
setting("disableAuth").then((result) => {
if (result) {
callback(null, true);
} else {
// Login Rate Limit
loginRateLimiter.pass(null, 0).then((pass) => {
if (pass) {
exports.login(username, password).then((user) => {
callback(null, user != null);
if (user == null) {
loginRateLimiter.removeTokens(1);
}
});
} else {
callback(null, false);
// Login Rate Limit
loginRateLimiter.pass(null, 0).then((pass) => {
if (pass) {
exports.login(username, password).then((user) => {
callback(null, user != null);
if (user == null) {
loginRateLimiter.removeTokens(1);
}
});
} else {
callback(null, false);
}
});
}
exports.basicAuth = basicAuth({
authorizer: myAuthorizer,
authorizeAsync: true,
challenge: true,
});
exports.basicAuth = async function (req, res, next) {
const middleware = basicAuth({
authorizer: myAuthorizer,
authorizeAsync: true,
challenge: true,
});
const disabledAuth = await setting("disableAuth");
if (!disabledAuth) {
middleware(req, res, next);
} else {
next();
}
};

@ -0,0 +1,54 @@
const https = require("https");
const http = require("http");
const CacheableLookup = require("cacheable-lookup");
class CacheableDnsHttpAgent {
static cacheable = new CacheableLookup();
static httpAgentList = {};
static httpsAgentList = {};
/**
* Register cacheable to global agents
*/
static registerGlobalAgent() {
this.cacheable.install(http.globalAgent);
this.cacheable.install(https.globalAgent);
}
static install(agent) {
this.cacheable.install(agent);
}
/**
* @param {https.AgentOptions} agentOptions
* @returns {https.Agent}
*/
static getHttpsAgent(agentOptions) {
let key = JSON.stringify(agentOptions);
if (!(key in this.httpsAgentList)) {
this.httpsAgentList[key] = new https.Agent(agentOptions);
this.cacheable.install(this.httpsAgentList[key]);
}
return this.httpsAgentList[key];
}
/**
* @param {http.AgentOptions} agentOptions
* @returns {http.Agent}
*/
static getHttpAgent(agentOptions) {
let key = JSON.stringify(agentOptions);
if (!(key in this.httpAgentList)) {
this.httpAgentList[key] = new http.Agent(agentOptions);
this.cacheable.install(this.httpAgentList[key]);
}
return this.httpAgentList[key];
}
}
module.exports = {
CacheableDnsHttpAgent,
};
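A hypothetical usage sketch of this agent cache (the require path and the axios wiring are illustrative assumptions, not part of this diff):

```js
// Hypothetical usage: reuse one DNS-caching https.Agent per unique set of agent options.
const { CacheableDnsHttpAgent } = require("./cacheable-dns-http-agent");
const axios = require("axios");

const httpsAgent = CacheableDnsHttpAgent.getHttpsAgent({
    maxCachedSessions: 0,
    rejectUnauthorized: true,
});

axios.get("https://example.com", { httpsAgent })
    .then((res) => console.log(res.status))
    .catch((err) => console.error(err.message));
```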

@ -1,11 +1,13 @@
const { setSetting } = require("./util-server");
const { setSetting, setting } = require("./util-server");
const axios = require("axios");
const compareVersions = require("compare-versions");
exports.version = require("../package.json").version;
exports.latestVersion = null;
let interval;
/** Start 48 hour check interval */
exports.startInterval = () => {
let check = async () => {
try {
@ -16,6 +18,19 @@ exports.startInterval = () => {
res.data.slow = "1000.0.0";
}
if (await setting("checkUpdate") === false) {
return;
}
let checkBeta = await setting("checkBeta");
if (checkBeta && res.data.beta) {
if (compareVersions.compare(res.data.beta, res.data.slow, ">")) {
exports.latestVersion = res.data.beta;
return;
}
}
if (res.data.slow) {
exports.latestVersion = res.data.slow;
}
@ -28,6 +43,11 @@ exports.startInterval = () => {
interval = setInterval(check, 3600 * 1000 * 48);
};
/**
* Enable the check update feature
* @param {boolean} value Should the check update feature be enabled?
* @returns {Promise<void>}
*/
exports.enableCheckUpdate = async (value) => {
await setSetting("checkUpdate", value);

@ -3,10 +3,16 @@
*/
const { TimeLogger } = require("../src/util");
const { R } = require("redbean-node");
const { io } = require("./server");
const { UptimeKumaServer } = require("./uptime-kuma-server");
const io = UptimeKumaServer.getInstance().io;
const { setting } = require("./util-server");
const checkVersion = require("./check-version");
/**
* Send list of notification providers to client
* @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>}
*/
async function sendNotificationList(socket) {
const timeLogger = new TimeLogger();
@ -16,7 +22,10 @@ async function sendNotificationList(socket) {
]);
for (let bean of list) {
result.push(bean.export());
let notificationObject = bean.export();
notificationObject.isDefault = (notificationObject.isDefault === 1);
notificationObject.active = (notificationObject.active === 1);
result.push(notificationObject);
}
io.to(socket.userID).emit("notificationList", result);
@ -28,8 +37,11 @@ async function sendNotificationList(socket) {
/**
* Send Heartbeat History list to socket
* @param toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param overwrite Overwrite client-side's heartbeat list
* @param {Socket} socket Socket.io instance
* @param {number} monitorID ID of monitor to send heartbeat history
* @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
* @returns {Promise<void>}
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
@ -55,11 +67,12 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
}
/**
* Important Heart beat list (aka event list)
* @param socket
* @param monitorID
* @param toUser True = send to all browsers with the same user id, False = send to the current browser only
* @param overwrite Overwrite client-side's heartbeat list
* Important Heart beat list (aka event list)
* @param {Socket} socket Socket.io instance
* @param {number} monitorID ID of monitor to send heartbeat history
* @param {boolean} [toUser=false] True = send to all browsers with the same user id, False = send to the current browser only
* @param {boolean} [overwrite=false] Overwrite client-side's heartbeat list
* @returns {Promise<void>}
*/
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
@ -83,6 +96,27 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
}
/**
* Emit proxy list to client
* @param {Socket} socket Socket.io socket instance
* @return {Promise<Bean[]>}
*/
async function sendProxyList(socket) {
const timeLogger = new TimeLogger();
const list = await R.find("proxy", " user_id = ? ", [ socket.userID ]);
io.to(socket.userID).emit("proxyList", list.map(bean => bean.export()));
timeLogger.print("Send Proxy List");
return list;
}
/**
* Emits the version information to the client.
* @param {Socket} socket Socket.io socket instance
* @returns {Promise<void>}
*/
async function sendInfo(socket) {
socket.emit("info", {
version: checkVersion.version,
@ -91,10 +125,35 @@ async function sendInfo(socket) {
});
}
/**
* Send list of docker hosts to client
* @param {Socket} socket Socket.io socket instance
* @returns {Promise<Bean[]>}
*/
async function sendDockerHostList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("docker_host", " user_id = ? ", [
socket.userID,
]);
for (let bean of list) {
result.push(bean.toJSON());
}
io.to(socket.userID).emit("dockerHostList", result);
timeLogger.print("Send Docker Host List");
return list;
}
module.exports = {
sendNotificationList,
sendImportantHeartbeatList,
sendHeartbeatList,
sendInfo
sendProxyList,
sendInfo,
sendDockerHostList
};
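As a sketch only (the connection handler is an assumption, not shown here), these helpers are typically called once a socket has authenticated and socket.userID is set:
const { sendInfo, sendNotificationList, sendProxyList, sendDockerHostList } = require("./client");

// Hypothetical post-login refresh of everything the client needs.
async function afterLogin(socket) {
    await sendInfo(socket);
    await sendNotificationList(socket);
    await sendProxyList(socket);
    await sendDockerHostList(socket);
}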

@ -1,7 +1,20 @@
const args = require("args-parser")(process.argv);
const demoMode = args["demo"] || false;
const badgeConstants = {
naColor: "#999",
defaultUpColor: "#66c20a",
defaultDownColor: "#c2290a",
defaultPingColor: "blue", // as defined by badge-maker / shields.io
defaultStyle: "flat",
defaultPingValueSuffix: "ms",
defaultPingLabelSuffix: "h",
defaultUptimeValueSuffix: "%",
defaultUptimeLabelSuffix: "h",
};
module.exports = {
args,
demoMode
demoMode,
badgeConstants,
};
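For illustration only (assuming the badge-maker package referenced in the comment above): the constants plug straight into its format object.
const { makeBadge } = require("badge-maker");
const { badgeConstants } = require("./config");

// Hypothetical uptime badge using the defaults defined above.
const svg = makeBadge({
    label: "Uptime (24h)",
    message: "99.95" + badgeConstants.defaultUptimeValueSuffix,
    color: badgeConstants.defaultUpColor,
    style: badgeConstants.defaultStyle,
});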

@ -1,7 +1,7 @@
const fs = require("fs");
const { R } = require("redbean-node");
const { setSetting, setting } = require("./util-server");
const { debug, sleep } = require("../src/util");
const { log, sleep } = require("../src/util");
const dayjs = require("dayjs");
const knex = require("knex");
@ -53,7 +53,16 @@ class Database {
"patch-2fa-invalidate-used-token.sql": true,
"patch-notification_sent_history.sql": true,
"patch-monitor-basic-auth.sql": true,
}
"patch-add-docker-columns.sql": true,
"patch-status-page.sql": true,
"patch-proxy.sql": true,
"patch-monitor-expiry-notification.sql": true,
"patch-status-page-footer-css.sql": true,
"patch-added-mqtt-monitor.sql": true,
"patch-add-clickable-status-page-link.sql": true,
"patch-add-sqlserver-monitor.sql": true,
"patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] },
};
/**
* The final version should be 10 after merged tag feature
@ -63,6 +72,10 @@ class Database {
static noReject = true;
/**
* Initialize the database
* @param {Object} args Arguments to initialize DB with
*/
static init(args) {
// Data Directory (must be end with "/")
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
@ -77,10 +90,19 @@ class Database {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
console.log(`Data Dir: ${Database.dataDir}`);
log.info("db", `Data Dir: ${Database.dataDir}`);
}
static async connect(testMode = false) {
/**
* Connect to the database
* @param {boolean} [testMode=false] Should the connection be
* started in test mode?
* @param {boolean} [autoloadModels=true] Should models be
* automatically loaded?
* @param {boolean} [noLog=false] Should logs not be output?
* @returns {Promise<void>}
*/
static async connect(testMode = false, autoloadModels = true, noLog = false) {
const acquireConnectionTimeout = 120 * 1000;
const Dialect = require("knex/lib/dialects/sqlite3/index.js");
@ -110,7 +132,10 @@ class Database {
// Auto map the model to a bean object
R.freeze(true);
await R.autoloadModels("./server/model");
if (autoloadModels) {
await R.autoloadModels("./server/model");
}
await R.exec("PRAGMA foreign_keys = ON");
if (testMode) {
@ -123,12 +148,20 @@ class Database {
await R.exec("PRAGMA cache_size = -12000");
await R.exec("PRAGMA auto_vacuum = FULL");
console.log("SQLite config:");
console.log(await R.getAll("PRAGMA journal_mode"));
console.log(await R.getAll("PRAGMA cache_size"));
console.log("SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
// This ensures that an operating system crash or power failure will not corrupt the database.
// FULL synchronous is very safe, but it is also slower.
// Read more: https://sqlite.org/pragma.html#pragma_synchronous
await R.exec("PRAGMA synchronous = FULL");
if (!noLog) {
log.info("db", "SQLite config:");
log.info("db", await R.getAll("PRAGMA journal_mode"));
log.info("db", await R.getAll("PRAGMA cache_size"));
log.info("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
}
/** Patch the database */
static async patch() {
let version = parseInt(await setting("database_version"));
@ -136,33 +169,39 @@ class Database {
version = 0;
}
console.info("Your database version: " + version);
console.info("Latest database version: " + this.latestVersion);
log.info("db", "Your database version: " + version);
log.info("db", "Latest database version: " + this.latestVersion);
if (version === this.latestVersion) {
console.info("Database patch not needed");
log.info("db", "Database patch not needed");
} else if (version > this.latestVersion) {
console.info("Warning: Database version is newer than expected");
log.info("db", "Warning: Database version is newer than expected");
} else {
console.info("Database patch is needed");
log.info("db", "Database patch is needed");
this.backup(version);
try {
this.backup(version);
} catch (e) {
log.error("db", e);
log.error("db", "Unable to create a backup before patching the database. Please make sure you have enough space and permission.");
process.exit(1);
}
// Try catch anything here, if gone wrong, restore the backup
try {
for (let i = version + 1; i <= this.latestVersion; i++) {
const sqlFile = `./db/patch${i}.sql`;
console.info(`Patching ${sqlFile}`);
log.info("db", `Patching ${sqlFile}`);
await Database.importSQLFile(sqlFile);
console.info(`Patched ${sqlFile}`);
log.info("db", `Patched ${sqlFile}`);
await setSetting("database_version", i);
}
} catch (ex) {
await Database.close();
console.error(ex);
console.error("Start Uptime-Kuma failed due to issue patching the database");
console.error("Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
process.exit(1);
@ -170,22 +209,25 @@ class Database {
}
await this.patch2();
await this.migrateNewStatusPage();
}
/**
* Patch DB using new process
* Call it from patch() only
* @private
* @returns {Promise<void>}
*/
static async patch2() {
console.log("Database Patch 2.0 Process");
log.info("db", "Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
if (! databasePatchedFiles) {
databasePatchedFiles = {};
}
debug("Patched files:");
debug(databasePatchedFiles);
log.debug("db", "Patched files:");
log.debug("db", databasePatchedFiles);
try {
for (let sqlFilename in this.patchList) {
@ -193,15 +235,15 @@ class Database {
}
if (this.patched) {
console.log("Database Patched Successfully");
log.info("db", "Database Patched Successfully");
}
} catch (ex) {
await Database.close();
console.error(ex);
console.error("Start Uptime-Kuma failed due to issue patching the database");
console.error("Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
this.restore();
@ -212,24 +254,95 @@ class Database {
}
/**
* Migrate status page value in setting to "status_page" table
* @returns {Promise<void>}
*/
static async migrateNewStatusPage() {
// Fix 1.13.0 empty slug bug
await R.exec("UPDATE status_page SET slug = 'empty-slug-recover' WHERE TRIM(slug) = ''");
let title = await setting("title");
if (title) {
console.log("Migrating Status Page");
let statusPageCheck = await R.findOne("status_page", " slug = 'default' ");
if (statusPageCheck !== null) {
console.log("Migrating Status Page - Skip, default slug record is already existing");
return;
}
let statusPage = R.dispense("status_page");
statusPage.slug = "default";
statusPage.title = title;
statusPage.description = await setting("description");
statusPage.icon = await setting("icon");
statusPage.theme = await setting("statusPageTheme");
statusPage.published = !!await setting("statusPagePublished");
statusPage.search_engine_index = !!await setting("searchEngineIndex");
statusPage.show_tags = !!await setting("statusPageTags");
statusPage.password = null;
if (!statusPage.title) {
statusPage.title = "My Status Page";
}
if (!statusPage.icon) {
statusPage.icon = "";
}
if (!statusPage.theme) {
statusPage.theme = "light";
}
let id = await R.store(statusPage);
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("DELETE FROM setting WHERE type = 'statusPage'");
// Migrate Entry Page if it is status page
let entryPage = await setting("entryPage");
if (entryPage === "statusPage") {
await setSetting("entryPage", "statusPage-default", "general");
}
console.log("Migrating Status Page - Done");
}
}
/**
* Patch database using new patching process
* Call it from patch2() only
* @private
* @param sqlFilename
* @param databasePatchedFiles
* @returns {Promise<void>}
*/
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
let value = this.patchList[sqlFilename];
if (! value) {
console.log(sqlFilename + " skip");
log.info("db", sqlFilename + " skip");
return;
}
// Check if patched
if (! databasePatchedFiles[sqlFilename]) {
console.log(sqlFilename + " is not patched");
log.info("db", sqlFilename + " is not patched");
if (value.parents) {
console.log(sqlFilename + " need parents");
log.info("db", sqlFilename + " need parents");
for (let parentSQLFilename of value.parents) {
await this.patch2Recursion(parentSQLFilename, databasePatchedFiles);
}
@ -237,24 +350,24 @@ class Database {
this.backup(dayjs().format("YYYYMMDDHHmmss"));
console.log(sqlFilename + " is patching");
log.info("db", sqlFilename + " is patching");
this.patched = true;
await this.importSQLFile("./db/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
console.log(sqlFilename + " was patched successfully");
log.info("db", sqlFilename + " was patched successfully");
} else {
debug(sqlFilename + " is already patched, skip");
log.debug("db", sqlFilename + " is already patched, skip");
}
}
/**
* Sadly, multi sql statements is not supported by many sqlite libraries, I have to implement it myself
* @param filename
* Load an SQL file and execute it
* @param filename Filename of SQL file to import
* @returns {Promise<void>}
*/
static async importSQLFile(filename) {
// Sadly, multi sql statements is not supported by many sqlite libraries, I have to implement it myself
await R.getCell("SELECT 1");
let text = fs.readFileSync(filename).toString();
@ -282,6 +395,10 @@ class Database {
}
}
/**
* Acquire a direct connection to the database
* @returns {any}
*/
static getBetterSQLite3Database() {
return R.knex.client.acquireConnection();
}
@ -296,7 +413,7 @@ class Database {
};
process.addListener("unhandledRejection", listener);
console.log("Closing the database");
log.info("db", "Closing the database");
while (true) {
Database.noReject = true;
@ -306,10 +423,10 @@ class Database {
if (Database.noReject) {
break;
} else {
console.log("Waiting to close the database");
log.info("db", "Waiting to close the database");
}
}
console.log("SQLite closed");
log.info("db", "SQLite closed");
process.removeListener("unhandledRejection", listener);
}
@ -317,11 +434,11 @@ class Database {
/**
* One backup one time in this process.
* Reset this.backupPath if you want to backup again
* @param version
* @param {string} version Version code of backup
*/
static backup(version) {
if (! this.backupPath) {
console.info("Backing up the database");
log.info("db", "Backing up the database");
this.backupPath = this.dataDir + "kuma.db.bak" + version;
fs.copyFileSync(Database.path, this.backupPath);
@ -336,15 +453,30 @@ class Database {
this.backupWalPath = walPath + ".bak" + version;
fs.copyFileSync(walPath, this.backupWalPath);
}
// Double confirm if all files actually backup
if (!fs.existsSync(this.backupPath)) {
throw new Error("Backup failed! " + this.backupPath);
}
if (fs.existsSync(shmPath)) {
if (!fs.existsSync(this.backupShmPath)) {
throw new Error("Backup failed! " + this.backupShmPath);
}
}
if (fs.existsSync(walPath)) {
if (!fs.existsSync(this.backupWalPath)) {
throw new Error("Backup failed! " + this.backupWalPath);
}
}
}
}
/**
*
*/
/** Restore from most recent backup */
static restore() {
if (this.backupPath) {
console.error("Patching the database failed!!! Restoring the backup");
log.error("db", "Patching the database failed!!! Restoring the backup");
const shmPath = Database.path + "-shm";
const walPath = Database.path + "-wal";
@ -363,7 +495,7 @@ class Database {
fs.unlinkSync(walPath);
}
} catch (e) {
console.log("Restore failed; you may need to restore the backup manually");
log.error("db", "Restore failed; you may need to restore the backup manually");
process.exit(1);
}
@ -379,17 +511,22 @@ class Database {
}
} else {
console.log("Nothing to restore");
log.info("db", "Nothing to restore");
}
}
/** Get the size of the database */
static getSize() {
debug("Database.getSize()");
log.debug("db", "Database.getSize()");
let stats = fs.statSync(Database.path);
debug(stats);
log.debug("db", stats);
return stats.size;
}
/**
* Shrink the database
* @returns {Promise<void>}
*/
static async shrink() {
await R.exec("VACUUM");
}
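A minimal sketch of the extended connect() signature (the call site is an assumption): tests can now connect without autoloading models and with the SQLite config logs silenced.
const Database = require("./database");

// Hypothetical test bootstrap.
async function bootstrapForTests(args) {
    Database.init(args);
    await Database.connect(true, false, true); // testMode, no autoloadModels, no logs
    await Database.patch();
}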

@ -0,0 +1,106 @@
const axios = require("axios");
const { R } = require("redbean-node");
const version = require("../package.json").version;
const https = require("https");
class DockerHost {
/**
* Save a docker host
* @param {Object} dockerHost Docker host to save
* @param {?number} dockerHostID ID of the docker host to update
* @param {number} userID ID of the user who adds the docker host
* @returns {Promise<Bean>}
*/
static async save(dockerHost, dockerHostID, userID) {
let bean;
if (dockerHostID) {
bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [ dockerHostID, userID ]);
if (!bean) {
throw new Error("docker host not found");
}
} else {
bean = R.dispense("docker_host");
}
bean.user_id = userID;
bean.docker_daemon = dockerHost.dockerDaemon;
bean.docker_type = dockerHost.dockerType;
bean.name = dockerHost.name;
await R.store(bean);
return bean;
}
/**
* Delete a Docker host
* @param {number} dockerHostID ID of the Docker host to delete
* @param {number} userID ID of the user who created the Docker host
* @returns {Promise<void>}
*/
static async delete(dockerHostID, userID) {
let bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [ dockerHostID, userID ]);
if (!bean) {
throw new Error("docker host not found");
}
// Detach the deleted docker host from any monitors that use it
await R.exec("UPDATE monitor SET docker_host = null WHERE docker_host = ?", [ dockerHostID ]);
await R.trash(bean);
}
/**
* Fetches the number of containers on the Docker host
* @param {Object} dockerHost Docker host to check for
* @returns {Promise<number>} Total number of containers on the host
*/
static async testDockerHost(dockerHost) {
const options = {
url: "/containers/json?all=true",
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: false,
}),
};
if (dockerHost.dockerType === "socket") {
options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") {
options.baseURL = dockerHost.dockerDaemon;
}
let res = await axios.request(options);
if (Array.isArray(res.data)) {
if (res.data.length > 1) {
if ("ImageID" in res.data[0]) {
return res.data.length;
} else {
throw new Error("Invalid Docker response, is it Docker really a daemon?");
}
} else {
return res.data.length;
}
} else {
throw new Error("Invalid Docker response, is it Docker really a daemon?");
}
}
}
module.exports = {
DockerHost,
};
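A usage sketch (values are placeholders): testDockerHost resolves with the container count, or throws if the endpoint does not answer like a Docker daemon.
const { DockerHost } = require("./docker");

// Hypothetical check against the local Docker socket.
async function checkLocalDocker() {
    const count = await DockerHost.testDockerHost({
        dockerType: "socket",
        dockerDaemon: "/var/run/docker.sock",
    });
    console.log(`${count} container(s) found`);
}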

@ -3,12 +3,21 @@
Modified with 0 dependencies
*/
let fs = require("fs");
const { log } = require("../src/util");
let ImageDataURI = (() => {
/**
* Decode the data:image/ URI
* @param {string} dataURI data:image/ URI to decode
* @returns {?Object} An object with properties "imageType" and "dataBase64".
* The former is the image type, e.g., "png", and the latter is a base64
* encoded string of the image's binary data. If it fails to parse, returns
* null instead of an object.
*/
function decode(dataURI) {
if (!/data:image\//.test(dataURI)) {
console.log("ImageDataURI :: Error :: It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
log.error("image-data-uri", "It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
return null;
}
@ -20,9 +29,16 @@ let ImageDataURI = (() => {
};
}
/**
* Encode an image into a data:image/ URI
* @param {(Buffer|string)} data Data to encode
* @param {string} mediaType Media type of data
* @returns {(string|null)} A string representing the base64-encoded
* version of the given Buffer object or null if an error occurred.
*/
function encode(data, mediaType) {
if (!data || !mediaType) {
console.log("ImageDataURI :: Error :: Missing some of the required params: data, mediaType ");
log.error("image-data-uri", "Missing some of the required params: data, mediaType");
return null;
}
@ -33,6 +49,12 @@ let ImageDataURI = (() => {
return dataImgBase64;
}
/**
* Write data URI to file
* @param {string} dataURI data:image/ URI
* @param {string} [filePath] Path to write file to
* @returns {Promise<string>}
*/
function outputFile(dataURI, filePath) {
filePath = filePath || "./";
return new Promise((resolve, reject) => {

@ -1,7 +1,8 @@
const path = require("path");
const Bree = require("bree");
const { SHARE_ENV } = require("worker_threads");
const { log } = require("../src/util");
let bree;
const jobs = [
{
name: "clear-old-data",
@ -9,8 +10,13 @@ const jobs = [
},
];
/**
* Initialize background jobs
* @param {Object} args Arguments to pass to workers
* @returns {Bree}
*/
const initBackgroundJobs = function (args) {
const bree = new Bree({
bree = new Bree({
root: path.resolve("server", "jobs"),
jobs,
worker: {
@ -18,7 +24,7 @@ const initBackgroundJobs = function (args) {
workerData: args,
},
workerMessageHandler: (message) => {
console.log("[Background Job]:", message);
log.info("jobs", message);
}
});
@ -26,6 +32,13 @@ const initBackgroundJobs = function (args) {
return bree;
};
const stopBackgroundJobs = function () {
if (bree) {
bree.stop();
}
};
module.exports = {
initBackgroundJobs
initBackgroundJobs,
stopBackgroundJobs
};
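Sketch of the intended lifecycle (the caller and arguments are assumptions): start the Bree scheduler once at boot and stop it again on shutdown.
const { initBackgroundJobs, stopBackgroundJobs } = require("./jobs");

// Hypothetical server wiring.
initBackgroundJobs({ "data-dir": "./data/" });

process.on("SIGTERM", () => {
    stopBackgroundJobs();
});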

@ -30,7 +30,7 @@ const DEFAULT_KEEP_PERIOD = 180;
try {
await R.exec(
"DELETE FROM heartbeat WHERE time < DATETIME('now', '-' || ? || ' days') ",
[parsedPeriod]
[ parsedPeriod ]
);
} catch (e) {
log(`Failed to clear old data: ${e.message}`);

@ -2,14 +2,24 @@ const { parentPort, workerData } = require("worker_threads");
const Database = require("../database");
const path = require("path");
/**
* Send message to parent process for logging
* since worker_thread does not have access to stdout, this is used
* instead of console.log()
* @param {any} any The message to log
*/
const log = function (any) {
if (parentPort) {
parentPort.postMessage(any);
}
};
/**
* Exit the worker process
* @param {number} error The status code to exit
*/
const exit = function (error) {
if (error && error != 0) {
if (error && error !== 0) {
process.exit(error);
} else {
if (parentPort) {
@ -20,6 +30,7 @@ const exit = function (error) {
}
};
/** Connects to the database */
const connectDb = async function () {
const dbPath = path.join(
process.env.DATA_DIR || workerData["data-dir"] || "./data/"

@ -0,0 +1,19 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class DockerHost extends BeanModel {
/**
* Returns an object that ready to parse to JSON
* @returns {Object}
*/
toJSON() {
return {
id: this.id,
userID: this.user_id,
dockerDaemon: this.docker_daemon,
dockerType: this.docker_type,
name: this.name,
};
}
}
module.exports = DockerHost;

@ -3,12 +3,18 @@ const { R } = require("redbean-node");
class Group extends BeanModel {
async toPublicJSON() {
/**
* Return an object that ready to parse to JSON for public
* Only show necessary data to public
* @param {boolean} [showTags=false] Should the JSON include monitor tags
* @returns {Object}
*/
async toPublicJSON(showTags = false) {
let monitorBeanList = await this.getMonitorList();
let monitorList = [];
for (let bean of monitorBeanList) {
monitorList.push(await bean.toPublicJSON());
monitorList.push(await bean.toPublicJSON(showTags));
}
return {
@ -19,9 +25,13 @@ class Group extends BeanModel {
};
}
/**
* Get all monitors
* @returns {Bean[]}
*/
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
SELECT monitor.* FROM monitor, monitor_group
SELECT monitor.*, monitor_group.send_url FROM monitor, monitor_group
WHERE monitor.id = monitor_group.monitor_id
AND group_id = ?
ORDER BY monitor_group.weight

@ -13,6 +13,11 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
*/
class Heartbeat extends BeanModel {
/**
* Return an object that ready to parse to JSON for public
* Only show necessary data to public
* @returns {Object}
*/
toPublicJSON() {
return {
status: this.status,
@ -22,6 +27,10 @@ class Heartbeat extends BeanModel {
};
}
/**
* Return an object that ready to parse to JSON
* @returns {Object}
*/
toJSON() {
return {
monitorID: this.monitor_id,

@ -2,6 +2,11 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
class Incident extends BeanModel {
/**
* Return an object that ready to parse to JSON for public
* Only show necessary data to public
* @returns {Object}
*/
toPublicJSON() {
return {
id: this.id,

@ -6,14 +6,17 @@ dayjs.extend(utc);
dayjs.extend(timezone);
const axios = require("axios");
const { Prometheus } = require("../prometheus");
const { debug, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, errorLog } = require("../util-server");
const { log, UP, DOWN, PENDING, flipStatus, TimeLogger } = require("../../src/util");
const { tcping, ping, dnsResolve, checkCertificate, checkStatusCode, getTotalClientInRoom, setting, mssqlQuery, postgresQuery, mqttAsync, setSetting, httpNtlm } = require("../util-server");
const { R } = require("redbean-node");
const { BeanModel } = require("redbean-node/dist/bean-model");
const { Notification } = require("../notification");
const { Proxy } = require("../proxy");
const { demoMode } = require("../config");
const version = require("../../package.json").version;
const apicache = require("../modules/apicache");
const { UptimeKumaServer } = require("../uptime-kuma-server");
const { CacheableDnsHttpAgent } = require("../cacheable-dns-http-agent");
/**
* status:
@ -24,20 +27,32 @@ const apicache = require("../modules/apicache");
class Monitor extends BeanModel {
/**
* Return a object that ready to parse to JSON for public
* Return an object that ready to parse to JSON for public
* Only show necessary data to public
* @returns {Object}
*/
async toPublicJSON() {
return {
async toPublicJSON(showTags = false) {
let obj = {
id: this.id,
name: this.name,
sendUrl: this.sendUrl,
};
if (this.sendUrl) {
obj.url = this.url;
}
if (showTags) {
obj.tags = await this.getTags();
}
return obj;
}
/**
* Return a object that ready to parse to JSON
* Return an object that ready to parse to JSON
* @returns {Object}
*/
async toJSON() {
async toJSON(includeSensitiveData = true) {
let notificationIDList = {};
@ -49,17 +64,13 @@ class Monitor extends BeanModel {
notificationIDList[bean.notification_id] = true;
}
const tags = await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [this.id]);
const tags = await this.getTags();
return {
let data = {
id: this.id,
name: this.name,
url: this.url,
method: this.method,
body: this.body,
headers: this.headers,
basic_auth_user: this.basic_auth_user,
basic_auth_pass: this.basic_auth_pass,
hostname: this.hostname,
port: this.port,
maxretries: this.maxretries,
@ -69,6 +80,7 @@ class Monitor extends BeanModel {
interval: this.interval,
retryInterval: this.retryInterval,
keyword: this.keyword,
expiryNotification: this.isEnabledExpiryNotification(),
ignoreTls: this.getIgnoreTls(),
upsideDown: this.isUpsideDown(),
maxredirects: this.maxredirects,
@ -77,9 +89,42 @@ class Monitor extends BeanModel {
dns_resolve_server: this.dns_resolve_server,
dns_last_result: this.dns_last_result,
pushToken: this.pushToken,
docker_container: this.docker_container,
docker_host: this.docker_host,
proxyId: this.proxy_id,
notificationIDList,
tags: tags,
mqttUsername: this.mqttUsername,
mqttPassword: this.mqttPassword,
mqttTopic: this.mqttTopic,
mqttSuccessMessage: this.mqttSuccessMessage,
databaseConnectionString: this.databaseConnectionString,
databaseQuery: this.databaseQuery,
authMethod: this.authMethod,
authWorkstation: this.authWorkstation,
authDomain: this.authDomain,
};
if (includeSensitiveData) {
data = {
...data,
headers: this.headers,
body: this.body,
basic_auth_user: this.basic_auth_user,
basic_auth_pass: this.basic_auth_pass,
pushToken: this.pushToken,
};
}
return data;
}
/**
* Get all tags applied to this monitor
* @returns {Promise<LooseObject<any>[]>}
*/
async getTags() {
return await R.getAll("SELECT mt.*, tag.name, tag.color FROM monitor_tag mt JOIN tag ON mt.tag_id = tag.id WHERE mt.monitor_id = ?", [ this.id ]);
}
/**
@ -91,6 +136,14 @@ class Monitor extends BeanModel {
return Buffer.from(user + ":" + pass).toString("base64");
}
/**
* Is the TLS expiry notification enabled?
* @returns {boolean}
*/
isEnabledExpiryNotification() {
return Boolean(this.expiryNotification);
}
/**
* Parse to boolean
* @returns {boolean}
@ -107,10 +160,18 @@ class Monitor extends BeanModel {
return Boolean(this.upsideDown);
}
/**
* Get accepted status codes
* @returns {Object}
*/
getAcceptedStatuscodes() {
return JSON.parse(this.accepted_statuscodes_json);
}
/**
* Start monitor
* @param {Server} io Socket server instance
*/
start(io) {
let previousBeat = null;
let retries = 0;
@ -119,11 +180,24 @@ class Monitor extends BeanModel {
const beat = async () => {
let beatInterval = this.interval;
if (! beatInterval) {
beatInterval = 1;
}
if (demoMode) {
if (beatInterval < 20) {
console.log("beat interval too low, reset to 20s");
beatInterval = 20;
}
}
// Expose here for prometheus update
// undefined if not https
let tlsInfo = undefined;
if (! previousBeat) {
if (!previousBeat || this.type === "push") {
previousBeat = await R.findOne("heartbeat", " monitor_id = ? ORDER BY time DESC", [
this.id,
]);
@ -133,7 +207,7 @@ class Monitor extends BeanModel {
let bean = R.dispense("heartbeat");
bean.monitor_id = this.id;
bean.time = R.isoDateTime(dayjs.utc());
bean.time = R.isoDateTimeMillis(dayjs.utc());
bean.status = DOWN;
if (this.isUpsideDown()) {
@ -141,7 +215,7 @@ class Monitor extends BeanModel {
}
// Duration
if (! isFirstBeat) {
if (!isFirstBeat) {
bean.duration = dayjs(bean.time).diff(dayjs(previousBeat.time), "second");
} else {
bean.duration = 0;
@ -154,13 +228,18 @@ class Monitor extends BeanModel {
// HTTP basic auth
let basicAuthHeader = {};
if (this.basic_auth_user) {
if (this.auth_method === "basic") {
basicAuthHeader = {
"Authorization": "Basic " + this.encodeBase64(this.basic_auth_user, this.basic_auth_pass),
};
}
debug(`[${this.name}] Prepare Options for axios`);
const httpsAgentOptions = {
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: !this.getIgnoreTls(),
};
log.debug("monitor", `[${this.name}] Prepare Options for axios`);
const options = {
url: this.url,
@ -173,47 +252,78 @@ class Monitor extends BeanModel {
...(this.headers ? JSON.parse(this.headers) : {}),
...(basicAuthHeader),
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
}),
maxRedirects: this.maxredirects,
validateStatus: (status) => {
return checkStatusCode(status, this.getAcceptedStatuscodes());
},
};
debug(`[${this.name}] Axios Request`);
let res = await axios.request(options);
if (this.proxy_id) {
const proxy = await R.load("proxy", this.proxy_id);
if (proxy && proxy.active) {
const { httpAgent, httpsAgent } = Proxy.createAgents(proxy, {
httpsAgentOptions: httpsAgentOptions,
});
options.proxy = false;
options.httpAgent = httpAgent;
options.httpsAgent = httpsAgent;
}
}
if (!options.httpsAgent) {
options.httpsAgent = new https.Agent(httpsAgentOptions);
}
log.debug("monitor", `[${this.name}] Axios Options: ${JSON.stringify(options)}`);
log.debug("monitor", `[${this.name}] Axios Request`);
let res;
if (this.auth_method === "ntlm") {
options.httpsAgent.keepAlive = true;
res = await httpNtlm(options, {
username: this.basic_auth_user,
password: this.basic_auth_pass,
domain: this.authDomain,
workstation: this.authWorkstation ? this.authWorkstation : undefined
});
} else {
res = await axios.request(options);
}
bean.msg = `${res.status} - ${res.statusText}`;
bean.ping = dayjs().valueOf() - startTime;
// Check certificate if https is used
let certInfoStartTime = dayjs().valueOf();
if (this.getUrl()?.protocol === "https:") {
debug(`[${this.name}] Check cert`);
log.debug("monitor", `[${this.name}] Check cert`);
try {
let tlsInfoObject = checkCertificate(res);
tlsInfo = await this.updateTlsInfo(tlsInfoObject);
if (!this.getIgnoreTls()) {
debug(`[${this.name}] call sendCertNotification`);
if (!this.getIgnoreTls() && this.isEnabledExpiryNotification()) {
log.debug("monitor", `[${this.name}] call sendCertNotification`);
await this.sendCertNotification(tlsInfoObject);
}
} catch (e) {
if (e.message !== "No TLS certificate in response") {
console.error(e.message);
log.error("monitor", "Caught error");
log.error("monitor", e.message);
}
}
}
if (process.env.TIMELOGGER === "1") {
debug("Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
log.debug("monitor", "Cert Info Query Time: " + (dayjs().valueOf() - certInfoStartTime) + "ms");
}
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID == this.id) {
console.log(res.data);
if (process.env.UPTIME_KUMA_LOG_RESPONSE_BODY_MONITOR_ID === this.id) {
log.info("monitor", res.data);
}
if (this.type === "http") {
@ -231,7 +341,11 @@ class Monitor extends BeanModel {
bean.msg += ", keyword is found";
bean.status = UP;
} else {
throw new Error(bean.msg + ", but keyword is not found");
data = data.replace(/<[^>]*>?|[\n\r]|\s+/gm, " ");
if (data.length > 50) {
data = data.substring(0, 47) + "...";
}
throw new Error(bean.msg + ", but keyword is not in [" + data + "]");
}
}
@ -249,27 +363,27 @@ class Monitor extends BeanModel {
let startTime = dayjs().valueOf();
let dnsMessage = "";
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.dns_resolve_type);
let dnsRes = await dnsResolve(this.hostname, this.dns_resolve_server, this.port, this.dns_resolve_type);
bean.ping = dayjs().valueOf() - startTime;
if (this.dns_resolve_type == "A" || this.dns_resolve_type == "AAAA" || this.dns_resolve_type == "TXT") {
if (this.dns_resolve_type === "A" || this.dns_resolve_type === "AAAA" || this.dns_resolve_type === "TXT") {
dnsMessage += "Records: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type == "CNAME" || this.dns_resolve_type == "PTR") {
} else if (this.dns_resolve_type === "CNAME" || this.dns_resolve_type === "PTR") {
dnsMessage = dnsRes[0];
} else if (this.dns_resolve_type == "CAA") {
} else if (this.dns_resolve_type === "CAA") {
dnsMessage = dnsRes[0].issue;
} else if (this.dns_resolve_type == "MX") {
} else if (this.dns_resolve_type === "MX") {
dnsRes.forEach(record => {
dnsMessage += `Hostname: ${record.exchange} - Priority: ${record.priority} | `;
});
dnsMessage = dnsMessage.slice(0, -2);
} else if (this.dns_resolve_type == "NS") {
} else if (this.dns_resolve_type === "NS") {
dnsMessage += "Servers: ";
dnsMessage += dnsRes.join(" | ");
} else if (this.dns_resolve_type == "SOA") {
} else if (this.dns_resolve_type === "SOA") {
dnsMessage += `NS-Name: ${dnsRes.nsname} | Hostmaster: ${dnsRes.hostmaster} | Serial: ${dnsRes.serial} | Refresh: ${dnsRes.refresh} | Retry: ${dnsRes.retry} | Expire: ${dnsRes.expire} | MinTTL: ${dnsRes.minttl}`;
} else if (this.dns_resolve_type == "SRV") {
} else if (this.dns_resolve_type === "SRV") {
dnsRes.forEach(record => {
dnsMessage += `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight} | `;
});
@ -286,25 +400,33 @@ class Monitor extends BeanModel {
bean.msg = dnsMessage;
bean.status = UP;
} else if (this.type === "push") { // Type: Push
const time = R.isoDateTime(dayjs.utc().subtract(this.interval, "second"));
log.debug("monitor", `[${this.name}] Checking monitor at ${dayjs().format("YYYY-MM-DD HH:mm:ss.SSS")}`);
const bufferTime = 1000; // 1s buffer to accommodate clock differences
let heartbeatCount = await R.count("heartbeat", " monitor_id = ? AND time > ? ", [
this.id,
time
]);
if (previousBeat) {
const msSinceLastBeat = dayjs.utc().valueOf() - dayjs.utc(previousBeat.time).valueOf();
debug("heartbeatCount" + heartbeatCount + " " + time);
log.debug("monitor", `[${this.name}] msSinceLastBeat = ${msSinceLastBeat}`);
if (heartbeatCount <= 0) {
// Fix #922, since previous heartbeat could be inserted by api, it should get from database
previousBeat = await Monitor.getPreviousHeartbeat(this.id);
throw new Error("No heartbeat in the time window");
// If the previous beat was down or pending we use the regular
// beatInterval/retryInterval in the setTimeout further below
if (previousBeat.status !== (this.isUpsideDown() ? DOWN : UP) || msSinceLastBeat > beatInterval * 1000 + bufferTime) {
throw new Error("No heartbeat in the time window");
} else {
let timeout = beatInterval * 1000 - msSinceLastBeat;
if (timeout < 0) {
timeout = bufferTime;
} else {
timeout += bufferTime;
}
// No need to insert successful heartbeat for push type, so end here
retries = 0;
log.debug("monitor", `[${this.name}] timeout = ${timeout}`);
this.heartbeatInterval = setTimeout(beat, timeout);
return;
}
} else {
// No need to insert successful heartbeat for push type, so end here
retries = 0;
this.heartbeatInterval = setTimeout(beat, this.interval * 1000);
return;
throw new Error("No heartbeat in the time window");
}
} else if (this.type === "steam") {
@ -322,9 +444,12 @@ class Monitor extends BeanModel {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
},
httpsAgent: new https.Agent({
httpsAgent: CacheableDnsHttpAgent.getHttpsAgent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
rejectUnauthorized: !this.getIgnoreTls(),
}),
httpAgent: CacheableDnsHttpAgent.getHttpAgent({
maxCachedSessions: 0,
}),
maxRedirects: this.maxredirects,
validateStatus: (status) => {
@ -346,7 +471,59 @@ class Monitor extends BeanModel {
} else {
throw new Error("Server not found on Steam");
}
} else if (this.type === "docker") {
log.debug(`[${this.name}] Prepare Options for Axios`);
const dockerHost = await R.load("docker_host", this.docker_host);
const options = {
url: `/containers/${this.docker_container}/json`,
headers: {
"Accept": "*/*",
"User-Agent": "Uptime-Kuma/" + version,
},
httpsAgent: new https.Agent({
maxCachedSessions: 0, // Use Custom agent to disable session reuse (https://github.com/nodejs/node/issues/3940)
rejectUnauthorized: ! this.getIgnoreTls(),
}),
};
if (dockerHost._dockerType === "socket") {
options.socketPath = dockerHost._dockerDaemon;
} else if (dockerHost._dockerType === "tcp") {
options.baseURL = dockerHost._dockerDaemon;
}
log.debug(`[${this.name}] Axios Request`);
let res = await axios.request(options);
if (res.data.State.Running) {
bean.status = UP;
bean.msg = "";
}
} else if (this.type === "mqtt") {
bean.msg = await mqttAsync(this.hostname, this.mqttTopic, this.mqttSuccessMessage, {
port: this.port,
username: this.mqttUsername,
password: this.mqttPassword,
interval: this.interval,
});
bean.status = UP;
} else if (this.type === "sqlserver") {
let startTime = dayjs().valueOf();
await mssqlQuery(this.databaseConnectionString, this.databaseQuery);
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else if (this.type === "postgres") {
let startTime = dayjs().valueOf();
await postgresQuery(this.databaseConnectionString, this.databaseQuery);
bean.msg = "";
bean.status = UP;
bean.ping = dayjs().valueOf() - startTime;
} else {
bean.msg = "Unknown Monitor Type";
bean.status = PENDING;
@ -377,9 +554,7 @@ class Monitor extends BeanModel {
}
}
let beatInterval = this.interval;
debug(`[${this.name}] Check isImportant`);
log.debug("monitor", `[${this.name}] Check isImportant`);
let isImportant = Monitor.isImportantBeat(isFirstBeat, previousBeat?.status, bean.status);
// Mark as important if status changed, ignore pending pings,
@ -387,11 +562,11 @@ class Monitor extends BeanModel {
if (isImportant) {
bean.important = true;
debug(`[${this.name}] sendNotification`);
log.debug("monitor", `[${this.name}] sendNotification`);
await Monitor.sendNotification(isFirstBeat, this, bean);
// Clear Status Page Cache
debug(`[${this.name}] apicache clear`);
log.debug("monitor", `[${this.name}] apicache clear`);
apicache.clear();
} else {
@ -399,55 +574,48 @@ class Monitor extends BeanModel {
}
if (bean.status === UP) {
console.info(`Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
log.debug("monitor", `Monitor #${this.id} '${this.name}': Successful Response: ${bean.ping} ms | Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else if (bean.status === PENDING) {
if (this.retryInterval > 0) {
beatInterval = this.retryInterval;
}
console.warn(`Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
log.warn("monitor", `Monitor #${this.id} '${this.name}': Pending: ${bean.msg} | Max retries: ${this.maxretries} | Retry: ${retries} | Retry Interval: ${beatInterval} seconds | Type: ${this.type}`);
} else {
console.warn(`Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
log.warn("monitor", `Monitor #${this.id} '${this.name}': Failing: ${bean.msg} | Interval: ${beatInterval} seconds | Type: ${this.type}`);
}
debug(`[${this.name}] Send to socket`);
log.debug("monitor", `[${this.name}] Send to socket`);
io.to(this.user_id).emit("heartbeat", bean.toJSON());
Monitor.sendStats(io, this.id, this.user_id);
debug(`[${this.name}] Store`);
log.debug("monitor", `[${this.name}] Store`);
await R.store(bean);
debug(`[${this.name}] prometheus.update`);
log.debug("monitor", `[${this.name}] prometheus.update`);
prometheus.update(bean, tlsInfo);
previousBeat = bean;
if (! this.isStop) {
if (demoMode) {
if (beatInterval < 20) {
console.log("beat interval too low, reset to 20s");
beatInterval = 20;
}
}
debug(`[${this.name}] SetTimeout for next check.`);
log.debug("monitor", `[${this.name}] SetTimeout for next check.`);
this.heartbeatInterval = setTimeout(safeBeat, beatInterval * 1000);
} else {
console.log(`[${this.name}] isStop = true, no next check.`);
log.info("monitor", `[${this.name}] isStop = true, no next check.`);
}
};
/** Get a heartbeat and handle errors */
const safeBeat = async () => {
try {
await beat();
} catch (e) {
console.trace(e);
errorLog(e, false);
console.error("Please report to https://github.com/louislam/uptime-kuma/issues");
UptimeKumaServer.errorLog(e, false);
log.error("monitor", "Please report to https://github.com/louislam/uptime-kuma/issues");
if (! this.isStop) {
console.log("Try to restart the monitor");
log.info("monitor", "Try to restart the monitor");
this.heartbeatInterval = setTimeout(safeBeat, this.interval * 1000);
}
}
@ -463,16 +631,27 @@ class Monitor extends BeanModel {
}
}
/** Stop monitor */
stop() {
clearTimeout(this.heartbeatInterval);
this.isStop = true;
this.prometheus().remove();
}
/**
* Get a new prometheus instance
* @returns {Prometheus}
*/
prometheus() {
return new Prometheus(this);
}
/**
* Helper Method:
* returns URL object for further usage
* returns null if url is invalid
* @returns {null|URL}
* @returns {(null|URL)}
*/
getUrl() {
try {
@ -485,48 +664,54 @@ class Monitor extends BeanModel {
/**
* Store TLS info to database
* @param checkCertificateResult
* @returns {Promise<object>}
* @returns {Promise<Object>}
*/
async updateTlsInfo(checkCertificateResult) {
let tls_info_bean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
let tlsInfoBean = await R.findOne("monitor_tls_info", "monitor_id = ?", [
this.id,
]);
if (tls_info_bean == null) {
tls_info_bean = R.dispense("monitor_tls_info");
tls_info_bean.monitor_id = this.id;
if (tlsInfoBean == null) {
tlsInfoBean = R.dispense("monitor_tls_info");
tlsInfoBean.monitor_id = this.id;
} else {
// Clear sent history if the cert changed.
try {
let oldCertInfo = JSON.parse(tls_info_bean.info_json);
let oldCertInfo = JSON.parse(tlsInfoBean.info_json);
let isValidObjects = oldCertInfo && oldCertInfo.certInfo && checkCertificateResult && checkCertificateResult.certInfo;
if (isValidObjects) {
if (oldCertInfo.certInfo.fingerprint256 !== checkCertificateResult.certInfo.fingerprint256) {
debug("Resetting sent_history");
log.debug("monitor", "Resetting sent_history");
await R.exec("DELETE FROM notification_sent_history WHERE type = 'certificate' AND monitor_id = ?", [
this.id
]);
} else {
debug("No need to reset sent_history");
debug(oldCertInfo.certInfo.fingerprint256);
debug(checkCertificateResult.certInfo.fingerprint256);
log.debug("monitor", "No need to reset sent_history");
log.debug("monitor", oldCertInfo.certInfo.fingerprint256);
log.debug("monitor", checkCertificateResult.certInfo.fingerprint256);
}
} else {
debug("Not valid object");
log.debug("monitor", "Not valid object");
}
} catch (e) { }
}
tls_info_bean.info_json = JSON.stringify(checkCertificateResult);
await R.store(tls_info_bean);
tlsInfoBean.info_json = JSON.stringify(checkCertificateResult);
await R.store(tlsInfoBean);
return checkCertificateResult;
}
/**
* Send statistics to clients
* @param {Server} io Socket server instance
* @param {number} monitorID ID of monitor to send
* @param {number} userID ID of user to send to
*/
static async sendStats(io, monitorID, userID) {
const hasClients = getTotalClientInRoom(io, userID) > 0;
@ -536,13 +721,13 @@ class Monitor extends BeanModel {
await Monitor.sendUptime(24 * 30, io, monitorID, userID);
await Monitor.sendCertInfo(io, monitorID, userID);
} else {
debug("No clients in the room, no need to send stats");
log.debug("monitor", "No clients in the room, no need to send stats");
}
}
/**
*
* @param duration : int Hours
* Send the average ping to user
* @param {number} duration Hours
*/
static async sendAvgPing(duration, io, monitorID, userID) {
const timeLogger = new TimeLogger();
@ -562,12 +747,18 @@ class Monitor extends BeanModel {
io.to(userID).emit("avgPing", monitorID, avgPing);
}
/**
* Send certificate information to client
* @param {Server} io Socket server instance
* @param {number} monitorID ID of monitor to send
* @param {number} userID ID of user to send to
*/
static async sendCertInfo(io, monitorID, userID) {
let tls_info = await R.findOne("monitor_tls_info", "monitor_id = ?", [
let tlsInfo = await R.findOne("monitor_tls_info", "monitor_id = ?", [
monitorID,
]);
if (tls_info != null) {
io.to(userID).emit("certInfo", monitorID, tls_info.info_json);
if (tlsInfo != null) {
io.to(userID).emit("certInfo", monitorID, tlsInfo.info_json);
}
}
@ -575,7 +766,8 @@ class Monitor extends BeanModel {
* Uptime with calculation
* Calculation based on:
* https://www.uptrends.com/support/kb/reporting/calculation-of-uptime-and-downtime
* @param duration : int Hours
* @param {number} duration Hours
* @param {number} monitorID ID of monitor to calculate
*/
static async calcUptime(duration, monitorID) {
const timeLogger = new TimeLogger();
@ -641,13 +833,23 @@ class Monitor extends BeanModel {
/**
* Send Uptime
* @param duration : int Hours
* @param {number} duration Hours
* @param {Server} io Socket server instance
* @param {number} monitorID ID of monitor to send
* @param {number} userID ID of user to send to
*/
static async sendUptime(duration, io, monitorID, userID) {
const uptime = await this.calcUptime(duration, monitorID);
io.to(userID).emit("uptime", monitorID, duration, uptime);
}
/**
* Has status of monitor changed since last beat?
* @param {boolean} isFirstBeat Is this the first beat of this monitor?
* @param {const} previousBeatStatus Status of the previous beat
* @param {const} currentBeatStatus Status of the current beat
* @returns {boolean} True if is an important beat else false
*/
static isImportantBeat(isFirstBeat, previousBeatStatus, currentBeatStatus) {
// * ? -> ANY STATUS = important [isFirstBeat]
// UP -> PENDING = not important
@ -666,6 +868,12 @@ class Monitor extends BeanModel {
return isImportant;
}
/**
* Send a notification about a monitor
* @param {boolean} isFirstBeat Is this beat the first of this monitor?
* @param {Monitor} monitor The monitor to send a notification about
* @param {Bean} bean Status information about monitor
*/
static async sendNotification(isFirstBeat, monitor, bean) {
if (!isFirstBeat || bean.status === DOWN) {
const notificationList = await Monitor.getNotificationList(monitor);
@ -681,15 +889,20 @@ class Monitor extends BeanModel {
for (let notification of notificationList) {
try {
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(), bean.toJSON());
await Notification.send(JSON.parse(notification.config), msg, await monitor.toJSON(false), bean.toJSON());
} catch (e) {
console.error("Cannot send notification to " + notification.name);
console.log(e);
log.error("monitor", "Cannot send notification to " + notification.name);
log.error("monitor", e);
}
}
}
}
/**
* Get list of notification providers for a given monitor
* @param {Monitor} monitor Monitor to get notification providers for
* @returns {Promise<LooseObject<any>[]>}
*/
static async getNotificationList(monitor) {
let notificationList = await R.getAll("SELECT notification.* FROM notification, monitor_notification WHERE monitor_id = ? AND monitor_notification.notification_id = notification.id ", [
monitor.id,
@ -697,21 +910,42 @@ class Monitor extends BeanModel {
return notificationList;
}
/**
* Send notification about a certificate
* @param {Object} tlsInfoObject Information about certificate
*/
async sendCertNotification(tlsInfoObject) {
if (tlsInfoObject && tlsInfoObject.certInfo && tlsInfoObject.certInfo.daysRemaining) {
const notificationList = await Monitor.getNotificationList(this);
debug("call sendCertNotificationByTargetDays");
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 21, notificationList);
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 14, notificationList);
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, 7, notificationList);
let notifyDays = await setting("tlsExpiryNotifyDays");
if (notifyDays == null || !Array.isArray(notifyDays)) {
// Reset Default
setSetting("tlsExpiryNotifyDays", [ 7, 14, 21 ], "general");
notifyDays = [ 7, 14, 21 ];
}
if (notifyDays != null && Array.isArray(notifyDays)) {
for (const day of notifyDays) {
log.debug("monitor", "call sendCertNotificationByTargetDays", day);
await this.sendCertNotificationByTargetDays(tlsInfoObject.certInfo.daysRemaining, day, notificationList);
}
}
}
}
/**
* Send a certificate notification when certificate expires in less
* than target days
* @param {number} daysRemaining Number of days remaining on certificate
* @param {number} targetDays Number of days to alert after
* @param {LooseObject<any>[]} notificationList List of notification providers
* @returns {Promise<void>}
*/
async sendCertNotificationByTargetDays(daysRemaining, targetDays, notificationList) {
if (daysRemaining > targetDays) {
debug(`No need to send cert notification. ${daysRemaining} > ${targetDays}`);
log.debug("monitor", `No need to send cert notification. ${daysRemaining} > ${targetDays}`);
return;
}
@ -725,21 +959,21 @@ class Monitor extends BeanModel {
// Sent already, no need to send again
if (row) {
debug("Sent already, no need to send again");
log.debug("monitor", "Sent already, no need to send again");
return;
}
let sent = false;
debug("Send certificate notification");
log.debug("monitor", "Send certificate notification");
for (let notification of notificationList) {
try {
debug("Sending to " + notification.name);
log.debug("monitor", "Sending to " + notification.name);
await Notification.send(JSON.parse(notification.config), `[${this.name}][${this.url}] Certificate will be expired in ${daysRemaining} days`);
sent = true;
} catch (e) {
console.error("Cannot send cert notification to " + notification.name);
console.error(e);
log.error("monitor", "Cannot send cert notification to " + notification.name);
log.error("monitor", e);
}
}
@ -751,10 +985,15 @@ class Monitor extends BeanModel {
]);
}
} else {
debug("No notification, no need to send cert notification");
log.debug("monitor", "No notification, no need to send cert notification");
}
}
/**
* Get the status of the previous heartbeat
* @param {number} monitorID ID of monitor to check
* @returns {Promise<LooseObject<any>>}
*/
static async getPreviousHeartbeat(monitorID) {
return await R.getRow(`
SELECT status, time FROM heartbeat

@ -0,0 +1,25 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Proxy extends BeanModel {
/**
* Return an object that ready to parse to JSON
* @returns {Object}
*/
toJSON() {
return {
id: this._id,
userId: this._user_id,
protocol: this._protocol,
host: this._host,
port: this._port,
auth: !!this._auth,
username: this._username,
password: this._password,
active: !!this._active,
default: !!this._default,
createdDate: this._created_date,
};
}
}
module.exports = Proxy;

@ -0,0 +1,264 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
const cheerio = require("cheerio");
const { UptimeKumaServer } = require("../uptime-kuma-server");
class StatusPage extends BeanModel {
/**
* Like this: { "test-uptime.kuma.pet": "default" }
* @type {{}}
*/
static domainMappingList = { };
/**
*
* @param {Response} response
* @param {string} indexHTML
* @param {string} slug
*/
static async handleStatusPageResponse(response, indexHTML, slug) {
let statusPage = await R.findOne("status_page", " slug = ? ", [
slug
]);
if (statusPage) {
response.send(await StatusPage.renderHTML(indexHTML, statusPage));
} else {
response.status(404).send(UptimeKumaServer.getInstance().indexHTML);
}
}
/**
* SSR for status pages
* @param {string} indexHTML
* @param {StatusPage} statusPage
*/
static async renderHTML(indexHTML, statusPage) {
const $ = cheerio.load(indexHTML);
const description155 = statusPage.description?.substring(0, 155);
$("title").text(statusPage.title);
$("meta[name=description]").attr("content", description155);
if (statusPage.icon) {
$("link[rel=icon]")
.attr("href", statusPage.icon)
.removeAttr("type");
$("link[rel=apple-touch-icon]").remove();
}
const head = $("head");
// OG Meta Tags
head.append(`<meta property="og:title" content="${statusPage.title}" />`);
head.append(`<meta property="og:description" content="${description155}" />`);
// Preload data
const json = JSON.stringify(await StatusPage.getStatusPageData(statusPage));
head.append(`
<script>
window.preloadData = ${json}
</script>
`);
// manifest.json
$("link[rel=manifest]").attr("href", `/api/status-page/${statusPage.slug}/manifest.json`);
return $.root().html();
}
/**
* Get all status page data in one call
* @param {StatusPage} statusPage
*/
static async getStatusPageData(statusPage) {
// Incident
let incident = await R.findOne("incident", " pin = 1 AND active = 1 AND status_page_id = ? ", [
statusPage.id,
]);
if (incident) {
incident = incident.toPublicJSON();
}
// Public Group List
const publicGroupList = [];
const showTags = !!statusPage.show_tags;
const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [
statusPage.id
]);
for (let groupBean of list) {
let monitorGroup = await groupBean.toPublicJSON(showTags);
publicGroupList.push(monitorGroup);
}
// Response
return {
config: await statusPage.toPublicJSON(),
incident,
publicGroupList
};
}
/**
* Loads domain mapping from DB
* Return object like this: { "test-uptime.kuma.pet": "default" }
* @returns {Promise<void>}
*/
static async loadDomainMappingList() {
StatusPage.domainMappingList = await R.getAssoc(`
SELECT domain, slug
FROM status_page, status_page_cname
WHERE status_page.id = status_page_cname.status_page_id
`);
}
/**
* Send status page list to client
* @param {Server} io io Socket server instance
* @param {Socket} socket Socket.io instance
* @returns {Promise<Bean[]>}
*/
static async sendStatusPageList(io, socket) {
let result = {};
let list = await R.findAll("status_page", " ORDER BY title ");
for (let item of list) {
result[item.id] = await item.toJSON();
}
io.to(socket.userID).emit("statusPageList", result);
return list;
}
/**
* Update list of domain names
* @param {string[]} domainNameList
* @returns {Promise<void>}
*/
async updateDomainNameList(domainNameList) {
if (!Array.isArray(domainNameList)) {
throw new Error("Invalid array");
}
let trx = await R.begin();
await trx.exec("DELETE FROM status_page_cname WHERE status_page_id = ?", [
this.id,
]);
try {
for (let domain of domainNameList) {
if (typeof domain !== "string") {
throw new Error("Invalid domain");
}
if (domain.trim() === "") {
continue;
}
// If the domain name is used in another status page, delete it
await trx.exec("DELETE FROM status_page_cname WHERE domain = ?", [
domain,
]);
let mapping = trx.dispense("status_page_cname");
mapping.status_page_id = this.id;
mapping.domain = domain;
await trx.store(mapping);
}
await trx.commit();
} catch (error) {
await trx.rollback();
throw error;
}
}
/**
* Get list of domain names
* @returns {Object[]}
*/
getDomainNameList() {
let domainList = [];
for (let domain in StatusPage.domainMappingList) {
let s = StatusPage.domainMappingList[domain];
if (this.slug === s) {
domainList.push(domain);
}
}
return domainList;
}
/**
* Return an object that is ready to be parsed to JSON
* @returns {Object}
*/
async toJSON() {
return {
id: this.id,
slug: this.slug,
title: this.title,
description: this.description,
icon: this.getIcon(),
theme: this.theme,
published: !!this.published,
showTags: !!this.show_tags,
domainNameList: this.getDomainNameList(),
customCSS: this.custom_css,
footerText: this.footer_text,
showPoweredBy: !!this.show_powered_by,
};
}
/**
* Return an object that is ready to be parsed to JSON for the public
* Only show necessary data to the public
* @returns {Object}
*/
async toPublicJSON() {
return {
slug: this.slug,
title: this.title,
description: this.description,
icon: this.getIcon(),
theme: this.theme,
published: !!this.published,
showTags: !!this.show_tags,
customCSS: this.custom_css,
footerText: this.footer_text,
showPoweredBy: !!this.show_powered_by,
};
}
/**
* Convert slug to status page ID
* @param {string} slug
*/
static async slugToID(slug) {
return await R.getCell("SELECT id FROM status_page WHERE slug = ? ", [
slug
]);
}
/**
* Get path to the icon for the page
* @returns {string}
*/
getIcon() {
if (!this.icon) {
return "/icon.svg";
} else {
return this.icon;
}
}
}
module.exports = StatusPage;
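Taken together, handleStatusPageResponse and renderHTML implement the SSR path: look up the page by slug, rewrite the shipped index.html with cheerio, and inline the preloaded JSON so the frontend can hydrate without an extra round trip. A minimal wiring sketch, assuming an Express app and an index.html string read from disk (the require path and file location are assumptions, not taken from this diff):

const express = require("express");
const fs = require("fs");
const StatusPage = require("./server/model/status_page");   // assumed path

const app = express();
const indexHTML = fs.readFileSync("./dist/index.html", "utf8");   // assumed location of the built SPA

app.get("/status/:slug", async (request, response) => {
    // Sends the rewritten HTML, or a 404 with the plain SPA index if the slug is unknown.
    await StatusPage.handleStatusPageResponse(response, indexHTML, request.params.slug);
});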

@ -1,6 +1,11 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Tag extends BeanModel {
/**
* Return an object that is ready to be parsed to JSON
* @returns {Object}
*/
toJSON() {
return {
id: this._id,

@ -3,19 +3,30 @@ const passwordHash = require("../password-hash");
const { R } = require("redbean-node");
class User extends BeanModel {
/**
* Direct execute, no need R.store()
* @param newPassword
* Reset user password
* Fixes #1510: in the reset-password.js context there is no automatic model mapping, so call this static function instead.
* @param {number} userID ID of user to update
* @param {string} newPassword
* @returns {Promise<void>}
*/
async resetPassword(newPassword) {
static async resetPassword(userID, newPassword) {
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
passwordHash.generate(newPassword),
this.id
userID
]);
}
/**
* Reset this user's password
* @param {string} newPassword
* @returns {Promise<void>}
*/
async resetPassword(newPassword) {
await User.resetPassword(this.id, newPassword);
this.password = newPassword;
}
}
module.exports = User;
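The static and instance forms run the same UPDATE; the static one exists for contexts without automatic model mapping (the #1510 fix above), while the instance one additionally keeps this.password in sync. A short sketch, to be run inside an async context, with the ID and password purely illustrative and the require path assumed:

const { R } = require("redbean-node");
const User = require("./server/model/user");   // assumed path

// No bean loaded (e.g. the reset-password script): use the static form.
await User.resetPassword(1, "new-secret-password");

// Bean loaded with model mapping active: the instance form delegates to the static one.
const user = await R.findOne("user", " id = ? ", [ 1 ]);
await user.resetPassword("new-secret-password");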

@ -13,27 +13,49 @@ let t = {
let instances = [];
/**
* Does a === b
* @param {any} a
* @returns {function(any): boolean}
*/
let matches = function (a) {
return function (b) {
return a === b;
};
};
/**
* Does a !== b
* @param {any} a
* @returns {function(any): boolean}
*/
let doesntMatch = function (a) {
return function (b) {
return !matches(a)(b);
};
};
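Both helpers are curried equality predicates, which is what lets them drop straight into Array.prototype.filter when pruning cache keys later in the file. For example:

[ "a", "b", "c" ].filter(matches("b"));       // [ "b" ]
[ "a", "b", "c" ].filter(doesntMatch("b"));   // [ "a", "c" ]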
/**
* Get log duration
* @param {number} d Time in ms
* @param {string} prefix Prefix for log
* @returns {string} Coloured log string
*/
let logDuration = function (d, prefix) {
let str = d > 1000 ? (d / 1000).toFixed(2) + "sec" : d + "ms";
return "\x1b[33m- " + (prefix ? prefix + " " : "") + str + "\x1b[0m";
};
/**
* Get safe headers
* @param {Object} res Express response object
* @returns {Object}
*/
function getSafeHeaders(res) {
return res.getHeaders ? res.getHeaders() : res._headers;
}
/** Constructor for ApiCache instance */
function ApiCache() {
let memCache = new MemoryCache();
@ -68,6 +90,15 @@ function ApiCache() {
instances.push(this);
this.id = instances.length;
/**
* Logs a message to the console if the `DEBUG` environment variable is set.
* @param {string} a The first argument to log.
* @param {string} b The second argument to log.
* @param {string} c The third argument to log.
* @param {string} d The fourth argument to log, and so on... (optional)
*
* Generated by Trelent
*/
function debug(a, b, c, d) {
let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
return arg !== undefined;
@ -77,6 +108,13 @@ function ApiCache() {
return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
}
/**
* Returns true if the given request and response should be logged.
* @param {Object} request The HTTP request object.
* @param {Object} response The HTTP response object.
* @param {function(Object, Object):boolean} toggle
* @returns {boolean}
*/
function shouldCacheResponse(request, response, toggle) {
let opt = globalOptions;
let codes = opt.statusCodes;
@ -99,6 +137,11 @@ function ApiCache() {
return true;
}
/**
* Add key to index array
* @param {string} key Key to add
* @param {Object} req Express request object
*/
function addIndexEntries(key, req) {
let groupName = req.apicacheGroup;
@ -111,6 +154,16 @@ function ApiCache() {
index.all.unshift(key);
}
/**
* Returns a new object containing only the whitelisted headers.
* @param {Object} headers The original object of header names and
* values.
* @param {string[]} globalOptions.headerWhitelist An array of
* strings representing the whitelisted header names to keep in the
* output object.
*
* Generated by Trelent
*/
function filterBlacklistedHeaders(headers) {
return Object.keys(headers)
.filter(function (key) {
@ -122,6 +175,14 @@ function ApiCache() {
}, {});
}
/**
* Create a cache object
* @param {Object} headers The response headers to filter.
* @returns {Object} A new cache entry containing the response status,
* headers, data and encoding.
*
* Generated by Trelent
*/
function createCacheObject(status, headers, data, encoding) {
return {
status: status,
@ -132,6 +193,15 @@ function ApiCache() {
};
}
/**
* Sets a cache value for the given key.
* @param {string} key The cache key to set.
* @param {any} value The cache value to set.
* @param {number} duration How long in milliseconds the cached
* response should be valid for (defaults to 1 hour).
*
* Generated by Trelent
*/
function cacheResponse(key, value, duration) {
let redis = globalOptions.redisClient;
let expireCallback = globalOptions.events.expire;
@ -154,6 +224,13 @@ function ApiCache() {
}, Math.min(duration, 2147483647));
}
/**
* Appends content to the response.
* @param {Object} res Express response object
* @param {(string|Buffer)} content The content to append.
*
* Generated by Trelent
*/
function accumulateContent(res, content) {
if (content) {
if (typeof content == "string") {
@ -179,6 +256,17 @@ function ApiCache() {
}
}
/**
* Monkeypatches the response object to add cache control headers
* and create a cache object.
* @param {Object} req Express request object
* @param {Object} res Express response object
* @param {function} next Function to call next
* @param {string} key Key to add response as
* @param {number} duration Time to cache response for
* @param {string} strDuration Duration in string form
* @param {function(Object, Object):boolean} toggle
*/
function makeResponseCacheable(req, res, next, key, duration, strDuration, toggle) {
// monkeypatch res.end to create cache object
res._apicache = {
@ -245,6 +333,17 @@ function ApiCache() {
next();
}
/**
* Send a cached response to client
* @param {Request} request Express request object
* @param {Response} response Express response object
* @param {object} cacheObject Cache object to send
* @param {function(Object, Object):boolean} toggle
* @param {function} next Function to call next
* @param {number} duration Not used
* @returns {boolean|undefined} true if the request should be
* cached, false otherwise. If undefined, defaults to true.
*/
function sendCachedResponse(request, response, cacheObject, toggle, next, duration) {
if (toggle && !toggle(request, response)) {
return next();
@ -285,12 +384,19 @@ function ApiCache() {
return response.end(data, cacheObject.encoding);
}
/** Sync caching options */
function syncOptions() {
for (let i in middlewareOptions) {
Object.assign(middlewareOptions[i].options, globalOptions, middlewareOptions[i].localOptions);
}
}
/**
* Clear key from cache
* @param {string} target Key to clear
* @param {boolean} isAutomatic Is the key being cleared automatically
* @returns {number}
*/
this.clear = function (target, isAutomatic) {
let group = index.groups[target];
let redis = globalOptions.redisClient;
@ -365,6 +471,14 @@ function ApiCache() {
return this.getIndex();
};
/**
* Converts a duration string to an integer number of milliseconds.
* @param {(string|number)} duration The string to convert.
* @param {number} defaultDuration The default duration to return if
* the duration can't be parsed
* @returns {number} The converted value in milliseconds, or the
* defaultDuration if it can't be parsed.
*/
function parseDuration(duration, defaultDuration) {
if (typeof duration === "number") {
return duration;
@ -387,17 +501,24 @@ function ApiCache() {
return defaultDuration;
}
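Numbers pass straight through as milliseconds, while the string branch (largely elided in this diff) converts values such as "5 minutes" and falls back to defaultDuration when nothing matches. A hedged illustration of the intended behaviour, based on apicache's usual duration convention rather than the elided parsing code:

instance.getDuration(250);            // 250 (already milliseconds)
instance.getDuration("5 minutes");    // 300000, i.e. 5 * 60 * 1000
instance.getDuration("not a time");   // globalOptions.defaultDuration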
/**
* Parse duration
* @param {(number|string)} duration
* @returns {number} Duration parsed to a number
*/
this.getDuration = function (duration) {
return parseDuration(duration, globalOptions.defaultDuration);
};
/**
* Return cache performance statistics (hit rate). Suitable for putting into a route:
* Return cache performance statistics (hit rate). Suitable for
* putting into a route:
* <code>
* app.get('/api/cache/performance', (req, res) => {
* res.json(apicache.getPerformance())
* })
* </code>
* @returns {any[]}
*/
this.getPerformance = function () {
return performanceArray.map(function (p) {
@ -405,6 +526,11 @@ function ApiCache() {
});
};
/**
* Get index of a group
* @param {string} group
* @returns {Object|string[]} The key index for that group, or the full index if no group is given
*/
this.getIndex = function (group) {
if (group) {
return index.groups[group];
@ -413,6 +539,14 @@ function ApiCache() {
}
};
/**
* Express middleware
* @param {(string|number)} strDuration Duration to cache responses
* for.
* @param {function(Object, Object):boolean} middlewareToggle
* @param {Object} localOptions Options for APICache
* @returns {function} Express middleware function
*/
this.middleware = function cache(strDuration, middlewareToggle, localOptions) {
let duration = instance.getDuration(strDuration);
let opt = {};
@ -436,63 +570,72 @@ function ApiCache() {
options(localOptions);
/**
* A Function for non tracking performance
*/
* A function for not tracking performance (no-op)
*/
function NOOPCachePerformance() {
this.report = this.hit = this.miss = function () {}; // noop;
}
/**
* A function for tracking and reporting hit rate. These statistics are returned by the getPerformance() call above.
*/
* A function for tracking and reporting hit rate. These
* statistics are returned by the getPerformance() call above.
*/
function CachePerformance() {
/**
* Tracks the hit rate for the last 100 requests.
* If there have been fewer than 100 requests, the hit rate just considers the requests that have happened.
*/
* Tracks the hit rate for the last 100 requests. If there
* have been fewer than 100 requests, the hit rate just
* considers the requests that have happened.
*/
this.hitsLast100 = new Uint8Array(100 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 1000 requests.
* If there have been fewer than 1000 requests, the hit rate just considers the requests that have happened.
*/
* Tracks the hit rate for the last 1000 requests. If there
* have been fewer than 1000 requests, the hit rate just
* considers the requests that have happened.
*/
this.hitsLast1000 = new Uint8Array(1000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 10000 requests.
* If there have been fewer than 10000 requests, the hit rate just considers the requests that have happened.
*/
* Tracks the hit rate for the last 10000 requests. If there
* have been fewer than 10000 requests, the hit rate just
* considers the requests that have happened.
*/
this.hitsLast10000 = new Uint8Array(10000 / 4); // each hit is 2 bits
/**
* Tracks the hit rate for the last 100000 requests.
* If there have been fewer than 100000 requests, the hit rate just considers the requests that have happened.
*/
* Tracks the hit rate for the last 100000 requests. If
* there have been fewer than 100000 requests, the hit rate
* just considers the requests that have happened.
*/
this.hitsLast100000 = new Uint8Array(100000 / 4); // each hit is 2 bits
/**
* The number of calls that have passed through the middleware since the server started.
*/
* The number of calls that have passed through the
* middleware since the server started.
*/
this.callCount = 0;
/**
* The total number of hits since the server started
*/
* The total number of hits since the server started
*/
this.hitCount = 0;
/**
* The key from the last cache hit. This is useful in identifying which route these statistics apply to.
*/
* The key from the last cache hit. This is useful in
* identifying which route these statistics apply to.
*/
this.lastCacheHit = null;
/**
* The key from the last cache miss. This is useful in identifying which route these statistics apply to.
*/
* The key from the last cache miss. This is useful in
* identifying which route these statistics apply to.
*/
this.lastCacheMiss = null;
/**
* Return performance statistics
*/
* Return performance statistics
* @returns {Object}
*/
this.report = function () {
return {
lastCacheHit: this.lastCacheHit,
@ -509,10 +652,13 @@ function ApiCache() {
};
/**
* Computes a cache hit rate from an array of hits and misses.
* @param {Uint8Array} array An array representing hits and misses.
* @returns a number between 0 and 1, or null if the array has no hits or misses
*/
* Computes a cache hit rate from an array of hits and
* misses.
* @param {Uint8Array} array An array representing hits and
* misses.
* @returns {?number} a number between 0 and 1, or null if
* the array has no hits or misses
*/
this.hitRate = function (array) {
let hits = 0;
let misses = 0;
@ -538,16 +684,17 @@ function ApiCache() {
};
/**
* Record a hit or miss in the given array. It will be recorded at a position determined
* by the current value of the callCount variable.
* @param {Uint8Array} array An array representing hits and misses.
* @param {boolean} hit true for a hit, false for a miss
* Each element in the array is 8 bits, and encodes 4 hit/miss records.
* Each hit or miss is encoded as to bits as follows:
* 00 means no hit or miss has been recorded in these bits
* 01 encodes a hit
* 10 encodes a miss
*/
* Record a hit or miss in the given array. It will be
* recorded at a position determined by the current value of
* the callCount variable.
* @param {Uint8Array} array An array representing hits and
* misses.
* @param {boolean} hit true for a hit, false for a miss
* Each element in the array is 8 bits and encodes 4
* hit/miss records. Each hit or miss is encoded as two bits
* as follows: 00 means no hit or miss has been recorded in
* these bits, 01 encodes a hit, and 10 encodes a miss.
*/
this.recordHitInArray = function (array, hit) {
let arrayIndex = ~~(this.callCount / 4) % array.length;
let bitOffset = (this.callCount % 4) * 2; // 2 bits per record, 4 records per uint8 array element
@ -557,9 +704,11 @@ function ApiCache() {
};
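Because each Uint8Array element packs four 2-bit records, decoding mirrors the index arithmetic above: pick the element, shift by the record's offset, and mask two bits. A purely illustrative helper (not part of the original file) consistent with the encoding described in the comment, where 00 is empty, 01 is a hit and 10 is a miss:

function readHitRecord(array, callCount) {
    let arrayIndex = ~~(callCount / 4) % array.length;   // same ring-buffer element selection as recordHitInArray
    let bitOffset = (callCount % 4) * 2;                 // 2 bits per record, 4 records per element
    let bits = (array[arrayIndex] >> bitOffset) & 0b11;
    return bits === 0b01 ? "hit" : bits === 0b10 ? "miss" : "empty";
}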
/**
* Records the hit or miss in the tracking arrays and increments the call count.
* @param {boolean} hit true records a hit, false records a miss
*/
* Records the hit or miss in the tracking arrays and
* increments the call count.
* @param {boolean} hit true records a hit, false records a
* miss
*/
this.recordHit = function (hit) {
this.recordHitInArray(this.hitsLast100, hit);
this.recordHitInArray(this.hitsLast1000, hit);
@ -572,18 +721,18 @@ function ApiCache() {
};
/**
* Records a hit event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache hit
*/
* Records a hit event, setting lastCacheHit to the given key
* @param {string} key The key that had the cache hit
*/
this.hit = function (key) {
this.recordHit(true);
this.lastCacheHit = key;
};
/**
* Records a miss event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache miss
*/
* Records a miss event, setting lastCacheMiss to the given key
* @param {string} key The key that had the cache miss
*/
this.miss = function (key) {
this.recordHit(false);
this.lastCacheMiss = key;
@ -594,6 +743,13 @@ function ApiCache() {
performanceArray.push(perf);
/**
* Cache a request
* @param {Object} req Express request object
* @param {Object} res Express response object
* @param {function} next Function to call next
* @returns {any}
*/
let cache = function (req, res, next) {
function bypass() {
debug("bypass detected, skipping cache.");
@ -701,6 +857,11 @@ function ApiCache() {
return cache;
};
/**
* Process options
* @param {Object} options
* @returns {Object}
*/
this.options = function (options) {
if (options) {
Object.assign(globalOptions, options);
@ -721,6 +882,7 @@ function ApiCache() {
}
};
/** Reset the index */
this.resetIndex = function () {
index = {
all: [],
@ -728,6 +890,11 @@ function ApiCache() {
};
};
/**
* Create a new instance of ApiCache
* @param {Object} config Config to pass
* @returns {ApiCache}
*/
this.newInstance = function (config) {
let instance = new ApiCache();
@ -738,6 +905,7 @@ function ApiCache() {
return instance;
};
/** Clone this instance */
this.clone = function () {
return this.newInstance(this.options());
};
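In practice the instance is consumed through this.middleware, mounted per route with a duration string; getPerformance() then exposes the hit-rate statistics gathered by CachePerformance. A minimal Express sketch, assuming the module's entry point exports a ready-made ApiCache instance (as upstream apicache does) and with the require path adjusted to this repository's layout:

const express = require("express");
const apicache = require("./server/modules/apicache");   // assumed path

const app = express();
const cache = apicache.middleware;

// Responses for this route are cached for five minutes.
app.get("/api/expensive", cache("5 minutes"), (req, res) => {
    res.json({ generatedAt: new Date().toISOString() });
});

// Hit-rate statistics, as in the JSDoc example on getPerformance().
app.get("/api/cache/performance", (req, res) => {
    res.json(apicache.getPerformance());
});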

@ -3,6 +3,15 @@ function MemoryCache() {
this.size = 0;
}
/**
* Add an entry to the cache
* @param {string} key Key to store cache as
* @param {any} value Value to store
* @param {number} time Time to store for
* @param {function(any, string)} timeoutCallback Callback to call in
* case of timeout
* @returns {Object}
*/
MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
let old = this.cache[key];
let instance = this;
@ -22,6 +31,11 @@ MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
return entry;
};
/**
* Delete a cache entry
* @param {string} key Key to delete
* @returns {null}
*/
MemoryCache.prototype.delete = function (key) {
let entry = this.cache[key];
@ -36,18 +50,32 @@ MemoryCache.prototype.delete = function (key) {
return null;
};
/**
* Get value of key
* @param {string} key
* @returns {Object}
*/
MemoryCache.prototype.get = function (key) {
let entry = this.cache[key];
return entry;
};
/**
* Get value of cache entry
* @param {string} key
* @returns {any}
*/
MemoryCache.prototype.getValue = function (key) {
let entry = this.get(key);
return entry && entry.value;
};
/**
* Clear cache
* @returns {boolean}
*/
MemoryCache.prototype.clear = function () {
Object.keys(this.cache).forEach(function (key) {
this.delete(key);
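The prototype methods above amount to a plain object-backed store with a timer per entry. A small lifecycle sketch, assuming the class is required from this module's memory-cache file (path assumed):

const MemoryCache = require("./server/modules/apicache/memory-cache");   // assumed path

const cache = new MemoryCache();

// Keep the value for one second; the callback fires when the entry expires.
cache.add("greeting", "hello", 1000, (value, key) => {
    console.log("expired:", key);
});

cache.getValue("greeting");   // "hello"
cache.delete("greeting");     // clears the pending timeout and returns null
cache.clear();                // deletes every remaining key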

@ -0,0 +1,67 @@
const NotificationProvider = require("./notification-provider");
const { DOWN, UP } = require("../../src/util");
const axios = require("axios");
class Alerta extends NotificationProvider {
name = "alerta";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let alertaUrl = `${notification.alertaApiEndpoint}`;
let config = {
headers: {
"Content-Type": "application/json;charset=UTF-8",
"Authorization": "Key " + notification.alertaApiKey,
}
};
let data = {
environment: notification.alertaEnvironment,
severity: "critical",
correlate: [],
service: [ "UptimeKuma" ],
value: "Timeout",
tags: [ "uptimekuma" ],
attributes: {},
origin: "uptimekuma",
type: "exceptionAlert",
};
if (heartbeatJSON == null) {
let postData = Object.assign({
event: "msg",
text: msg,
group: "uptimekuma-msg",
resource: "Message",
}, data);
await axios.post(alertaUrl, postData, config);
} else {
let datadup = Object.assign( {
correlate: [ "service_up", "service_down" ],
event: monitorJSON["type"],
group: "uptimekuma-" + monitorJSON["type"],
resource: monitorJSON["name"],
}, data );
if (heartbeatJSON["status"] === DOWN) {
datadup.severity = notification.alertaAlertState; // critical
datadup.text = "Service " + monitorJSON["type"] + " is down.";
await axios.post(alertaUrl, datadup, config);
} else if (heartbeatJSON["status"] === UP) {
datadup.severity = notification.alertaRecoverState; // cleaned
datadup.text = "Service " + monitorJSON["type"] + " is up.";
await axios.post(alertaUrl, datadup, config);
}
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Alerta;
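send() only reads a handful of fields from the notification bean, so a stub object is enough to exercise the provider. In the sketch below every value is a placeholder (the field names come from the code above, the values do not), and the call is assumed to run inside an async context:

const Alerta = require("./server/notification-providers/alerta");   // assumed path

const notification = {
    alertaApiEndpoint: "https://alerta.example.com/api/alert",
    alertaApiKey: "placeholder-key",
    alertaEnvironment: "Production",
    alertaAlertState: "critical",    // severity used when a monitor goes DOWN
    alertaRecoverState: "cleared",   // severity used when a monitor comes back UP
};

// With heartbeatJSON left null this follows the generic "msg" branch.
await new Alerta().send(notification, "Testing Uptime Kuma notification");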

@ -0,0 +1,50 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { setting } = require("../util-server");
const { getMonitorRelativeURL, UP, DOWN } = require("../../src/util");
class AlertNow extends NotificationProvider {
name = "AlertNow";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let textMsg = "";
let status = "open";
let eventType = "ERROR";
let eventId = new Date().toISOString().slice(0, 10).replace(/-/g, "");
if (heartbeatJSON && heartbeatJSON.status === UP) {
textMsg = `[${heartbeatJSON.name}] ✅ Application is back online`;
status = "close";
eventType = "INFO";
eventId += `_${heartbeatJSON.name.replace(/\s/g, "")}`;
} else if (heartbeatJSON && heartbeatJSON.status === DOWN) {
textMsg = `[${heartbeatJSON.name}] 🔴 Application went down`;
}
textMsg += ` - ${msg}`;
const baseURL = await setting("primaryBaseURL");
if (baseURL && monitorJSON) {
textMsg += ` >> ${baseURL + getMonitorRelativeURL(monitorJSON.id)}`;
}
const data = {
"summary": textMsg,
"status": status,
"event_type": eventType,
"event_id": eventId,
};
await axios.post(notification.alertNowWebhookURL, data);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = AlertNow;

@ -37,6 +37,12 @@ class AliyunSMS extends NotificationProvider {
}
}
/**
* Send the SMS notification
* @param {BeanModel} notification Notification details
* @param {string} msgbody Message template
* @returns {boolean} True if successful else false
*/
async sendSms(notification, msgbody) {
let params = {
PhoneNumbers: notification.phonenumber,
@ -64,13 +70,18 @@ class AliyunSMS extends NotificationProvider {
};
let result = await axios(config);
if (result.data.Message == "OK") {
if (result.data.Message === "OK") {
return true;
}
return false;
}
/** Aliyun request sign */
/**
* Aliyun request sign
* @param {Object} param Parameters object to sign
* @param {string} AccessKeySecret Secret key to sign parameters with
* @returns {string}
*/
sign(param, AccessKeySecret) {
let param2 = {};
let data = [];
@ -82,8 +93,23 @@ class AliyunSMS extends NotificationProvider {
param2[key] = param[key];
}
// Escape more characters than encodeURIComponent does.
// For generating Aliyun signature, all characters except A-Za-z0-9~-._ are encoded.
// See https://help.aliyun.com/document_detail/315526.html
// This encoding method is known as RFC 3986 (https://tools.ietf.org/html/rfc3986)
let moreEscapesTable = function (m) {
return {
"!": "%21",
"*": "%2A",
"'": "%27",
"(": "%28",
")": "%29"
}[m];
};
for (let key in param2) {
data.push(`${encodeURIComponent(key)}=${encodeURIComponent(param2[key])}`);
let value = encodeURIComponent(param2[key]).replace(/[!*'()]/g, moreEscapesTable);
data.push(`${encodeURIComponent(key)}=${value}`);
}
let StringToSign = `POST&${encodeURIComponent("/")}&${encodeURIComponent(data.join("&"))}`;
@ -93,6 +119,11 @@ class AliyunSMS extends NotificationProvider {
.digest("base64");
}
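The extra replace exists because encodeURIComponent leaves ! * ' ( ) untouched, while Aliyun's signature algorithm expects strict RFC 3986 percent-encoding of everything outside A-Za-z0-9~-._. The difference, in isolation:

// encodeURIComponent("it's (ok)!*")  ->  "it's%20(ok)!*"   (apostrophe, parentheses, ! and * survive)
const strict = encodeURIComponent("it's (ok)!*")
    .replace(/[!*'()]/g, (m) => ({ "!": "%21", "*": "%2A", "'": "%27", "(": "%28", ")": "%29" }[m]));
// strict === "it%27s%20%28ok%29%21%2A"  (fully percent-encoded, matching the table above)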
/**
* Convert status constant to string
* @param {const} status The status constant
* @returns {string}
*/
statusToString(status) {
switch (status) {
case DOWN:

@ -1,14 +1,19 @@
const NotificationProvider = require("./notification-provider");
const child_process = require("child_process");
const childProcess = require("child_process");
class Apprise extends NotificationProvider {
name = "apprise";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let s = child_process.spawnSync("apprise", [ "-vv", "-b", msg, notification.appriseURL])
const args = [ "-vv", "-b", msg, notification.appriseURL ];
if (notification.title) {
args.push("-t");
args.push(notification.title);
}
const s = childProcess.spawnSync("apprise", args);
let output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";
const output = (s.stdout) ? s.stdout.toString() : "ERROR: maybe apprise not found";
if (output) {
@ -16,7 +21,7 @@ class Apprise extends NotificationProvider {
return "Sent Successfully";
}
throw new Error(output)
throw new Error(output);
} else {
return "No output from apprise";
}

@ -21,35 +21,35 @@ class Bark extends NotificationProvider {
name = "Bark";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
try {
var barkEndpoint = notification.barkEndpoint;
let barkEndpoint = notification.barkEndpoint;
// check if the endpoint has a "/" suffix, if so, delete it first
if (barkEndpoint.endsWith("/")) {
barkEndpoint = barkEndpoint.substring(0, barkEndpoint.length - 1);
}
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] == UP) {
let title = "UptimeKuma Monitor Up";
return await this.postNotification(title, msg, barkEndpoint);
}
// check if the endpoint has a "/" suffix, if so, delete it first
if (barkEndpoint.endsWith("/")) {
barkEndpoint = barkEndpoint.substring(0, barkEndpoint.length - 1);
}
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] == DOWN) {
let title = "UptimeKuma Monitor Down";
return await this.postNotification(title, msg, barkEndpoint);
}
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === UP) {
let title = "UptimeKuma Monitor Up";
return await this.postNotification(title, msg, barkEndpoint);
}
if (msg != null) {
let title = "UptimeKuma Message";
return await this.postNotification(title, msg, barkEndpoint);
}
if (msg != null && heartbeatJSON != null && heartbeatJSON["status"] === DOWN) {
let title = "UptimeKuma Monitor Down";
return await this.postNotification(title, msg, barkEndpoint);
}
} catch (error) {
throw error;
if (msg != null) {
let title = "UptimeKuma Message";
return await this.postNotification(title, msg, barkEndpoint);
}
}
// add additional parameter for better on device styles (iOS 15 optimized)
/**
* Add additional parameters for better on-device styles (iOS 15
* optimized)
* @param {string} postUrl URL to append parameters to
* @returns {string}
*/
appendAdditionalParameters(postUrl) {
// grouping all our notifications
postUrl += "?group=" + barkNotificationGroup;
@ -60,7 +60,11 @@ class Bark extends NotificationProvider {
return postUrl;
}
// thrown if failed to check result, result code should be in range 2xx
/**
* Check if result is successful
* @param {Object} result Axios response object
* @throws {Error} The status code is not in range 2xx
*/
checkResult(result) {
if (result.status == null) {
throw new Error("Bark notification failed with invalid response!");
@ -70,6 +74,13 @@ class Bark extends NotificationProvider {
}
}
/**
* Send the message
* @param {string} title Message title
* @param {string} subtitle Message
* @param {string} endpoint Endpoint to send request to
* @returns {string}
*/
async postNotification(title, subtitle, endpoint) {
// url encode title and subtitle
title = encodeURIComponent(title);

@ -12,7 +12,7 @@ class ClickSendSMS extends NotificationProvider {
let config = {
headers: {
"Content-Type": "application/json",
"Authorization": "Basic " + Buffer.from(notification.clicksendsmsLogin + ":" + notification.clicksendsmsPassword).toString('base64'),
"Authorization": "Basic " + Buffer.from(notification.clicksendsmsLogin + ":" + notification.clicksendsmsPassword).toString("base64"),
"Accept": "text/json",
}
};

@ -37,6 +37,12 @@ class DingDing extends NotificationProvider {
}
}
/**
* Send message to DingDing
* @param {BeanModel} notification
* @param {Object} params Parameters of message
* @returns {boolean} True if successful else false
*/
async sendToDingDing(notification, params) {
let timestamp = Date.now();
@ -50,13 +56,18 @@ class DingDing extends NotificationProvider {
};
let result = await axios(config);
if (result.data.errmsg == "ok") {
if (result.data.errmsg === "ok") {
return true;
}
return false;
}
/** DingDing sign */
/**
* DingDing sign
* @param {Date} timestamp Timestamp of message
* @param {string} secretKey Secret key to sign data with
* @returns {string}
*/
sign(timestamp, secretKey) {
return Crypto
.createHmac("sha256", Buffer.from(secretKey, "utf8"))
@ -64,7 +75,13 @@ class DingDing extends NotificationProvider {
.digest("base64");
}
/**
* Convert status constant to string
* @param {const} status The status constant
* @returns {string}
*/
statusToString(status) {
// TODO: Move to notification-provider.js to avoid repetition in classes
switch (status) {
case DOWN:
return "DOWN";

@ -17,25 +17,32 @@ class Discord extends NotificationProvider {
let discordtestdata = {
username: discordDisplayName,
content: msg,
}
await axios.post(notification.discordWebhookUrl, discordtestdata)
};
await axios.post(notification.discordWebhookUrl, discordtestdata);
return okMsg;
}
let url;
if (monitorJSON["type"] === "port") {
url = monitorJSON["hostname"];
if (monitorJSON["port"]) {
url += ":" + monitorJSON["port"];
}
} else {
url = monitorJSON["url"];
let address;
switch (monitorJSON["type"]) {
case "ping":
address = monitorJSON["hostname"];
break;
case "port":
case "dns":
case "steam":
address = monitorJSON["hostname"];
if (monitorJSON["port"]) {
address += ":" + monitorJSON["port"];
}
break;
default:
address = monitorJSON["url"];
break;
}
// If heartbeatJSON is not null, we go into the normal alerting loop.
if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let discorddowndata = {
username: discordDisplayName,
embeds: [{
@ -48,8 +55,8 @@ class Discord extends NotificationProvider {
value: monitorJSON["name"],
},
{
name: "Service URL",
value: url,
name: monitorJSON["type"] === "push" ? "Service Type" : "Service URL",
value: monitorJSON["type"] === "push" ? "Heartbeat" : address,
},
{
name: "Time (UTC)",
@ -61,16 +68,16 @@ class Discord extends NotificationProvider {
},
],
}],
}
};
if (notification.discordPrefixMessage) {
discorddowndata.content = notification.discordPrefixMessage;
}
await axios.post(notification.discordWebhookUrl, discorddowndata)
await axios.post(notification.discordWebhookUrl, discorddowndata);
return okMsg;
} else if (heartbeatJSON["status"] == UP) {
} else if (heartbeatJSON["status"] === UP) {
let discordupdata = {
username: discordDisplayName,
embeds: [{
@ -83,8 +90,8 @@ class Discord extends NotificationProvider {
value: monitorJSON["name"],
},
{
name: "Service URL",
value: url.startsWith("http") ? "[Visit Service](" + url + ")" : url,
name: monitorJSON["type"] === "push" ? "Service Type" : "Service URL",
value: monitorJSON["type"] === "push" ? "Heartbeat" : address.startsWith("http") ? "[Visit Service](" + address + ")" : address,
},
{
name: "Time (UTC)",
@ -92,21 +99,21 @@ class Discord extends NotificationProvider {
},
{
name: "Ping",
value: heartbeatJSON["ping"] + "ms",
value: heartbeatJSON["ping"] == null ? "N/A" : heartbeatJSON["ping"] + " ms",
},
],
}],
}
};
if (notification.discordPrefixMessage) {
discordupdata.content = notification.discordPrefixMessage;
}
await axios.post(notification.discordWebhookUrl, discordupdata)
await axios.post(notification.discordWebhookUrl, discordupdata);
return okMsg;
}
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}

@ -21,7 +21,7 @@ class Feishu extends NotificationProvider {
return okMsg;
}
if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let downdata = {
msg_type: "post",
content: {
@ -48,7 +48,7 @@ class Feishu extends NotificationProvider {
return okMsg;
}
if (heartbeatJSON["status"] == UP) {
if (heartbeatJSON["status"] === UP) {
let updata = {
msg_type: "post",
content: {

@ -13,11 +13,11 @@ class GoogleChat extends NotificationProvider {
try {
// Google Chat message formatting: https://developers.google.com/chat/api/guides/message-formats/basic
let textMsg = ''
let textMsg = "";
if (heartbeatJSON && heartbeatJSON.status === UP) {
textMsg = `✅ Application is back online\n`;
textMsg = "✅ Application is back online\n";
} else if (heartbeatJSON && heartbeatJSON.status === DOWN) {
textMsg = `🔴 Application went down\n`;
textMsg = "🔴 Application went down\n";
}
if (monitorJSON && monitorJSON.name) {

@ -0,0 +1,42 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class Gorush extends NotificationProvider {
name = "gorush";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let platformMapping = {
"ios": 1,
"android": 2,
"huawei": 3,
};
try {
let data = {
"notifications": [
{
"tokens": [ notification.gorushDeviceToken ],
"platform": platformMapping[notification.gorushPlatform],
"message": msg,
// Optional
"title": notification.gorushTitle,
"priority": notification.gorushPriority,
"retry": parseInt(notification.gorushRetry) || 0,
"topic": notification.gorushTopic,
}
]
};
let config = {};
await axios.post(`${notification.gorushServerURL}/api/push`, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Gorush;

@ -15,7 +15,7 @@ class Gotify extends NotificationProvider {
"message": msg,
"priority": notification.gotifyPriority || 8,
"title": "Uptime-Kuma",
})
});
return okMsg;

@ -25,9 +25,9 @@ class Line extends NotificationProvider {
"text": "Test Successful!"
}
]
}
await axios.post(lineAPIUrl, testMessage, config)
} else if (heartbeatJSON["status"] == DOWN) {
};
await axios.post(lineAPIUrl, testMessage, config);
} else if (heartbeatJSON["status"] === DOWN) {
let downMessage = {
"to": notification.lineUserID,
"messages": [
@ -36,9 +36,9 @@ class Line extends NotificationProvider {
"text": "UptimeKuma Alert: [🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
}
]
}
await axios.post(lineAPIUrl, downMessage, config)
} else if (heartbeatJSON["status"] == UP) {
};
await axios.post(lineAPIUrl, downMessage, config);
} else if (heartbeatJSON["status"] === UP) {
let upMessage = {
"to": notification.lineUserID,
"messages": [
@ -47,12 +47,12 @@ class Line extends NotificationProvider {
"text": "UptimeKuma Alert: [✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
}
]
}
await axios.post(lineAPIUrl, upMessage, config)
};
await axios.post(lineAPIUrl, upMessage, config);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}

@ -0,0 +1,43 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const qs = require("qs");
const { DOWN, UP } = require("../../src/util");
class LineNotify extends NotificationProvider {
name = "LineNotify";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let lineAPIUrl = "https://notify-api.line.me/api/notify";
let config = {
headers: {
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": "Bearer " + notification.lineNotifyAccessToken
}
};
if (heartbeatJSON == null) {
let testMessage = {
"message": msg,
};
await axios.post(lineAPIUrl, qs.stringify(testMessage), config);
} else if (heartbeatJSON["status"] === DOWN) {
let downMessage = {
"message": "\n[🔴 Down]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
};
await axios.post(lineAPIUrl, qs.stringify(downMessage), config);
} else if (heartbeatJSON["status"] === UP) {
let upMessage = {
"message": "\n[✅ Up]\n" + "Name: " + monitorJSON["name"] + " \n" + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"]
};
await axios.post(lineAPIUrl, qs.stringify(upMessage), config);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = LineNotify;

@ -8,38 +8,38 @@ class LunaSea extends NotificationProvider {
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice
let lunaseadevice = "https://notify.lunasea.app/v1/custom/device/" + notification.lunaseaDevice;
try {
if (heartbeatJSON == null) {
let testdata = {
"title": "Uptime Kuma Alert",
"body": "Testing Successful.",
}
await axios.post(lunaseadevice, testdata)
"body": msg,
};
await axios.post(lunaseadevice, testdata);
return okMsg;
}
if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let downdata = {
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(lunaseadevice, downdata)
};
await axios.post(lunaseadevice, downdata);
return okMsg;
}
if (heartbeatJSON["status"] == UP) {
if (heartbeatJSON["status"] === UP) {
let updata = {
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(lunaseadevice, updata)
};
await axios.post(lunaseadevice, updata);
return okMsg;
}
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}

@ -1,7 +1,7 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const Crypto = require("crypto");
const { debug } = require("../../src/util");
const { log } = require("../../src/util");
class Matrix extends NotificationProvider {
name = "matrix";
@ -17,11 +17,11 @@ class Matrix extends NotificationProvider {
.slice(0, size)
);
debug("Random String: " + randomString);
log.debug("notification", "Random String: " + randomString);
const roomId = encodeURIComponent(notification.internalRoomId);
debug("Matrix Room ID: " + roomId);
log.debug("notification", "Matrix Room ID: " + roomId);
try {
let config = {

@ -15,16 +15,21 @@ class Mattermost extends NotificationProvider {
let mattermostTestData = {
username: mattermostUserName,
text: msg,
}
await axios.post(notification.mattermostWebhookUrl, mattermostTestData)
};
await axios.post(notification.mattermostWebhookUrl, mattermostTestData);
return okMsg;
}
const mattermostChannel = notification.mattermostchannel.toLowerCase();
let mattermostChannel;
if (typeof notification.mattermostchannel === "string") {
mattermostChannel = notification.mattermostchannel.toLowerCase();
}
const mattermostIconEmoji = notification.mattermosticonemo;
const mattermostIconUrl = notification.mattermosticonurl;
if (heartbeatJSON["status"] == DOWN) {
if (heartbeatJSON["status"] === DOWN) {
let mattermostdowndata = {
username: mattermostUserName,
text: "Uptime Kuma Alert",
@ -68,7 +73,7 @@ class Mattermost extends NotificationProvider {
mattermostdowndata
);
return okMsg;
} else if (heartbeatJSON["status"] == UP) {
} else if (heartbeatJSON["status"] === UP) {
let mattermostupdata = {
username: mattermostUserName,
text: "Uptime Kuma Alert",

@ -7,17 +7,23 @@ class NotificationProvider {
name = undefined;
/**
* @param notification : BeanModel
* @param msg : string General Message
* @param monitorJSON : object Monitor details (For Up/Down only)
* @param heartbeatJSON : object Heartbeat details (For Up/Down only)
* Send a notification
* @param {BeanModel} notification
* @param {string} msg General Message
* @param {?Object} monitorJSON Monitor details (For Up/Down only)
* @param {?Object} heartbeatJSON Heartbeat details (For Up/Down only)
* @returns {Promise<string>} Return Successful Message
* Throw Error with fail msg
* @throws Error with fail msg
*/
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
throw new Error("Have to override Notification.send(...)");
}
/**
* Throws an error
* @param {any} error The error to throw
* @throws {any} The error specified
*/
throwGeneralAxiosError(error) {
let msg = "Error: " + error + " ";
@ -25,11 +31,11 @@ class NotificationProvider {
if (typeof error.response.data === "string") {
msg += error.response.data;
} else {
msg += JSON.stringify(error.response.data)
msg += JSON.stringify(error.response.data);
}
}
throw new Error(msg)
throw new Error(msg);
}
}
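Every concrete provider below follows the same contract: set a name, override send(), return a success message, and route failures through throwGeneralAxiosError. A minimal hypothetical provider as a sketch — the class and the exampleWebhookURL field are invented for illustration and do not exist in this codebase:

const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class ExampleWebhook extends NotificationProvider {

    name = "example-webhook";

    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        let okMsg = "Sent Successfully.";
        try {
            // "exampleWebhookURL" is a hypothetical notification field.
            await axios.post(notification.exampleWebhookURL, { text: msg });
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = ExampleWebhook;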

@ -0,0 +1,26 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class Ntfy extends NotificationProvider {
name = "ntfy";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
await axios.post(`${notification.ntfyserverurl}`, {
"topic": notification.ntfytopic,
"message": msg,
"priority": notification.ntfyPriority || 4,
"title": "Uptime-Kuma",
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = Ntfy;

@ -10,7 +10,7 @@ class Octopush extends NotificationProvider {
try {
// Default - V2
if (notification.octopushVersion == 2 || !notification.octopushVersion) {
if (notification.octopushVersion === 2 || !notification.octopushVersion) {
let config = {
headers: {
"api-key": notification.octopushAPIKey,
@ -30,14 +30,14 @@ class Octopush extends NotificationProvider {
"purpose": "alert",
"sender": notification.octopushSenderName
};
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config)
} else if (notification.octopushVersion == 1) {
await axios.post("https://api.octopush.com/v1/public/sms-campaign/send", data, config);
} else if (notification.octopushVersion === 1) {
let data = {
"user_login": notification.octopushDMLogin,
"api_key": notification.octopushDMAPIKey,
"sms_recipients": notification.octopushDMPhoneNumber,
"sms_sender": notification.octopushDMSenderName,
"sms_type": (notification.octopushDMSMSType == "sms_premium") ? "FR" : "XXX",
"sms_type": (notification.octopushDMSMSType === "sms_premium") ? "FR" : "XXX",
"transactional": "1",
//octopush not supporting non ascii char
"sms_text": msg.replace(/[^\x00-\x7F]/g, ""),
@ -49,7 +49,7 @@ class Octopush extends NotificationProvider {
},
params: data
};
await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config)
await axios.post("https://www.octopush-dm.com/api/sms/json", {}, config);
} else {
throw new Error("Unknown Octopush version!");
}

@ -0,0 +1,45 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
class OneBot extends NotificationProvider {
name = "OneBot";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
try {
let httpAddr = notification.httpAddr;
if (!httpAddr.startsWith("http")) {
httpAddr = "http://" + httpAddr;
}
if (!httpAddr.endsWith("/")) {
httpAddr += "/";
}
let onebotAPIUrl = httpAddr + "send_msg";
let config = {
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + notification.accessToken,
}
};
let pushText = "UptimeKuma Alert: " + msg;
let data = {
"auto_escape": true,
"message": pushText,
};
if (notification.msgType === "group") {
data["message_type"] = "group";
data["group_id"] = notification.recieverId;
} else {
data["message_type"] = "private";
data["user_id"] = notification.recieverId;
}
await axios.post(onebotAPIUrl, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = OneBot;

@ -0,0 +1,113 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { UP, DOWN, getMonitorRelativeURL } = require("../../src/util");
const { setting } = require("../util-server");
let successMessage = "Sent Successfully.";
class PagerDuty extends NotificationProvider {
name = "PagerDuty";
/**
* @inheritdoc
*/
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
try {
if (heartbeatJSON == null) {
const title = "Uptime Kuma Alert";
const monitor = {
type: "ping",
url: "Uptime Kuma Test Button",
};
return this.postNotification(notification, title, msg, monitor);
}
if (heartbeatJSON.status === UP) {
const title = "Uptime Kuma Monitor ✅ Up";
const eventAction = notification.pagerdutyAutoResolve || null;
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, eventAction);
}
if (heartbeatJSON.status === DOWN) {
const title = "Uptime Kuma Monitor 🔴 Down";
return this.postNotification(notification, title, heartbeatJSON.msg, monitorJSON, "trigger");
}
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
/**
* Check if result is successful, result code should be in range 2xx
* @param {Object} result Axios response object
* @throws {Error} The status code is not in range 2xx
*/
checkResult(result) {
if (result.status == null) {
throw new Error("PagerDuty notification failed with invalid response!");
}
if (result.status < 200 || result.status >= 300) {
throw new Error("PagerDuty notification failed with status code " + result.status);
}
}
/**
* Send the message
* @param {BeanModel} notification Notification details
* @param {string} title Message title
* @param {string} body Message
* @param {Object} monitorInfo Monitor details (For Up/Down only)
* @param {?string} eventAction Action event for PagerDuty (trigger, acknowledge, resolve)
* @returns {string}
*/
async postNotification(notification, title, body, monitorInfo, eventAction = "trigger") {
if (eventAction == null) {
return "No action required";
}
let monitorUrl;
if (monitorInfo.type === "port") {
monitorUrl = monitorInfo.hostname;
if (monitorInfo.port) {
monitorUrl += ":" + monitorInfo.port;
}
} else if (monitorInfo.hostname != null) {
monitorUrl = monitorInfo.hostname;
} else {
monitorUrl = monitorInfo.url;
}
const options = {
method: "POST",
url: notification.pagerdutyIntegrationUrl,
headers: { "Content-Type": "application/json" },
data: {
payload: {
summary: `[${title}] [${monitorInfo.name}] ${body}`,
severity: notification.pagerdutyPriority || "warning",
source: monitorUrl,
},
routing_key: notification.pagerdutyIntegrationKey,
event_action: eventAction,
dedup_key: "Uptime Kuma/" + monitorInfo.id,
}
};
const baseURL = await setting("primaryBaseURL");
if (baseURL && monitorInfo) {
options.client = "Uptime Kuma";
options.client_url = baseURL + getMonitorRelativeURL(monitorInfo.id);
}
let result = await axios.request(options);
this.checkResult(result);
if (result.statusText != null) {
return "PagerDuty notification succeed: " + result.statusText;
}
return successMessage;
}
}
module.exports = PagerDuty;
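The eventAction plumbing is what makes auto-resolve opt-in: DOWN heartbeats always post "trigger", UP heartbeats post whatever pagerdutyAutoResolve holds, and a null action makes postNotification() return "No action required" without calling the API. A configuration sketch with placeholder values, run inside an async context (postNotification also reads the primaryBaseURL setting, so a live settings store is assumed; require paths are assumptions):

const { UP } = require("./src/util");                                       // assumed path
const PagerDuty = require("./server/notification-providers/pagerduty");    // assumed path

const notification = {
    pagerdutyIntegrationUrl: "https://events.pagerduty.com/v2/enqueue",
    pagerdutyIntegrationKey: "placeholder-routing-key",
    pagerdutyPriority: "warning",
    pagerdutyAutoResolve: "resolve",   // leave unset to ignore UP heartbeats entirely
};

// An UP heartbeat with auto-resolve enabled posts event_action "resolve".
await new PagerDuty().send(
    notification,
    "Monitor recovered",
    { id: 1, name: "My Monitor", type: "http", url: "https://example.com" },
    { status: UP, msg: "200 - OK" }
);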

@ -12,7 +12,7 @@ class PromoSMS extends NotificationProvider {
let config = {
headers: {
"Content-Type": "application/json",
"Authorization": "Basic " + Buffer.from(notification.promosmsLogin + ":" + notification.promosmsPassword).toString('base64'),
"Authorization": "Basic " + Buffer.from(notification.promosmsLogin + ":" + notification.promosmsPassword).toString("base64"),
"Accept": "text/json",
}
};
@ -30,7 +30,7 @@ class PromoSMS extends NotificationProvider {
let error = "Something gone wrong. Api returned " + resp.data.response.status + ".";
this.throwGeneralAxiosError(error);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);

@ -23,26 +23,26 @@ class Pushbullet extends NotificationProvider {
"type": "note",
"title": "Uptime Kuma Alert",
"body": "Testing Successful.",
}
await axios.post(pushbulletUrl, testdata, config)
} else if (heartbeatJSON["status"] == DOWN) {
};
await axios.post(pushbulletUrl, testdata, config);
} else if (heartbeatJSON["status"] === DOWN) {
let downdata = {
"type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[🔴 Down] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(pushbulletUrl, downdata, config)
} else if (heartbeatJSON["status"] == UP) {
};
await axios.post(pushbulletUrl, downdata, config);
} else if (heartbeatJSON["status"] === UP) {
let updata = {
"type": "note",
"title": "UptimeKuma Alert: " + monitorJSON["name"],
"body": "[✅ Up] " + heartbeatJSON["msg"] + "\nTime (UTC): " + heartbeatJSON["time"],
}
await axios.post(pushbulletUrl, updata, config)
};
await axios.post(pushbulletUrl, updata, config);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}

@ -0,0 +1,52 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { DOWN, UP } = require("../../src/util");
class PushDeer extends NotificationProvider {
name = "PushDeer";
async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
let okMsg = "Sent Successfully.";
let pushdeerlink = "https://api2.pushdeer.com/message/push";
let valid = msg != null && monitorJSON != null && heartbeatJSON != null;
let title;
if (valid && heartbeatJSON.status === UP) {
title = "## Uptime Kuma: " + monitorJSON.name + " up";
} else if (valid && heartbeatJSON.status === DOWN) {
title = "## Uptime Kuma: " + monitorJSON.name + " down";
} else {
title = "## Uptime Kuma Message";
}
let data = {
"pushkey": notification.pushdeerKey,
"text": title,
"desp": msg.replace(/\n/g, "\n\n"),
"type": "markdown",
};
try {
let res = await axios.post(pushdeerlink, data);
if ("error" in res.data) {
let error = res.data.error;
this.throwGeneralAxiosError(error);
}
if (res.data.content.result.length === 0) {
let error = "Invalid PushDeer key";
this.throwGeneralAxiosError(error);
} else if (JSON.parse(res.data.content.result[0]).success !== "ok") {
let error = "Unknown error";
this.throwGeneralAxiosError(error);
}
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}
}
}
module.exports = PushDeer;

@ -9,36 +9,31 @@ class Pushover extends NotificationProvider {
let okMsg = "Sent Successfully.";
let pushoverlink = "https://api.pushover.net/1/messages.json";
let data = {
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg,
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,
"priority": notification.pushoverpriority,
"title": notification.pushovertitle,
"retry": "30",
"expire": "3600",
"html": 1,
};
if (notification.pushoverdevice) {
data.device = notification.pushoverdevice;
}
try {
if (heartbeatJSON == null) {
let data = {
"message": msg,
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,
"priority": notification.pushoverpriority,
"title": notification.pushovertitle,
"retry": "30",
"expire": "3600",
"html": 1,
};
await axios.post(pushoverlink, data);
return okMsg;
} else {
data.message += "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"];
await axios.post(pushoverlink, data);
return okMsg;
}
let data = {
"message": "<b>Uptime Kuma Alert</b>\n\n<b>Message</b>:" + msg + "\n<b>Time (UTC)</b>:" + heartbeatJSON["time"],
"user": notification.pushoveruserkey,
"token": notification.pushoverapptoken,
"sound": notification.pushoversounds,
"priority": notification.pushoverpriority,
"title": notification.pushovertitle,
"retry": "30",
"expire": "3600",
"html": 1,
};
await axios.post(pushoverlink, data);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error);
}

@ -19,10 +19,10 @@ class Pushy extends NotificationProvider {
"badge": 1,
"sound": "ping.aiff"
}
})
});
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}

@ -2,7 +2,7 @@ const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const Slack = require("./slack");
const { setting } = require("../util-server");
const { getMonitorRelativeURL, UP, DOWN } = require("../../src/util");
const { getMonitorRelativeURL, DOWN } = require("../../src/util");
class RocketChat extends NotificationProvider {

@ -16,10 +16,10 @@ class Signal extends NotificationProvider {
};
let config = {};
await axios.post(notification.signalURL, data, config)
await axios.post(notification.signalURL, data, config);
return okMsg;
} catch (error) {
this.throwGeneralAxiosError(error)
this.throwGeneralAxiosError(error);
}
}
}

@ -10,6 +10,7 @@ class Slack extends NotificationProvider {
/**
* Deprecated property notification.slackbutton
* Set it as primary base url if this is not yet set.
* @param {string} url The primary base URL to use
*/
static async deprecateURL(url) {
let currentPrimaryBaseURL = await setting("primaryBaseURL");

@ -1,6 +1,6 @@
const nodemailer = require("nodemailer");
const NotificationProvider = require("./notification-provider");
const { DOWN, UP } = require("../../src/util");
const { DOWN } = require("../../src/util");
class SMTP extends NotificationProvider {

@ -5,6 +5,12 @@ const { DOWN, UP } = require("../../src/util");
class Teams extends NotificationProvider {
name = "teams";
/**
* Generate the message to send
* @param {const} status The status constant
* @param {string} monitorName Name of monitor
* @returns {string}
*/
_statusMessageFactory = (status, monitorName) => {
if (status === DOWN) {
return `🔴 Application [${monitorName}] went down`;
@ -14,6 +20,11 @@ class Teams extends NotificationProvider {
return "Notification";
};
/**
* Select theme color to use based on status
* @param {const} status The status constant
* @returns {string} Selected color in hex RGB format
*/
_getThemeColor = (status) => {
if (status === DOWN) {
return "ff0000";
@ -24,6 +35,14 @@ class Teams extends NotificationProvider {
return "008cff";
};
/**
* Generate payload for notification
* @param {const} status The status of the monitor
* @param {string} monitorMessage Message to send
* @param {string} monitorName Name of monitor affected
* @param {string} monitorUrl URL of monitor affected
* @returns {Object}
*/
_notificationPayloadFactory = ({
status,
monitorMessage,
@ -74,10 +93,21 @@ class Teams extends NotificationProvider {
};
};
/**
* Send the notification
* @param {string} webhookUrl URL to send the request to
* @param {Object} payload Payload generated by _notificationPayloadFactory
*/
_sendNotification = async (webhookUrl, payload) => {
await axios.post(webhookUrl, payload);
};
/**
* Send a general notification
* @param {string} webhookUrl URL to send request to
* @param {string} msg Message to send
* @returns {Promise<void>}
*/
_handleGeneralNotification = (webhookUrl, msg) => {
const payload = this._notificationPayloadFactory({
monitorMessage: msg
