Merge branch 'master' into monitor_async_start_stop

pull/2830/head
Frank Elsinga 10 months ago committed by GitHub
commit 89158be7de

@ -0,0 +1,28 @@
# Codespaces
You can modify Uptime Kuma in your browser without setting up a local development environment.
![image](https://github.com/louislam/uptime-kuma/assets/1336778/31d9f06d-dd0b-4405-8e0d-a96586ee4595)
1. Click `Code` -> `Create codespace on master`
2. Wait a few minutes until you see that there are two exposed ports
3. Open the URL for port `3000` and check that it is working
![image](https://github.com/louislam/uptime-kuma/assets/1336778/909b2eb4-4c5e-44e4-ac26-6d20ed856e7f)
## Frontend
Since the frontend is using [Vite.js](https://vitejs.dev/), all changes in this area will be hot-reloaded.
You don't need to restart the frontend unless you add a new frontend dependency.
## Backend
The backend does not automatically hot-reload.
You will need to restart the backend after making changes, using these steps:
1. Click `Terminal`
2. Click `Codespaces: server-dev` in the right panel
3. Press `Ctrl + C` to stop the server
4. Press `Up` to run `npm run start-server-dev`
![image](https://github.com/louislam/uptime-kuma/assets/1336778/e0c0a350-fe46-4588-9f37-e053c85834d1)

@ -0,0 +1,23 @@
{
"image": "mcr.microsoft.com/devcontainers/javascript-node:dev-18-bookworm",
"features": {
"ghcr.io/devcontainers/features/github-cli:1": {}
},
"updateContentCommand": "npm ci",
"postCreateCommand": "",
"postAttachCommand": {
"frontend-dev": "npm run start-frontend-devcontainer",
"server-dev": "npm run start-server-dev",
"open-port": "gh codespace ports visibility 3001:public -c $CODESPACE_NAME"
},
"customizations": {
"vscode": {
"extensions": [
"streetsidesoftware.code-spell-checker",
"dbaeumer.vscode-eslint",
"GitHub.copilot-chat"
]
}
},
"forwardPorts": [3000, 3001]
}

@ -1,6 +1,6 @@
/.idea /.idea
/node_modules /node_modules
/data /data*
/cypress /cypress
/out /out
/test /test
@ -18,6 +18,7 @@ README.md
.vscode .vscode
.eslint* .eslint*
.stylelint* .stylelint*
/.devcontainer
/.github /.github
yarn.lock yarn.lock
app.json app.json
@ -29,11 +30,16 @@ SECURITY.md
tsconfig.json tsconfig.json
.env .env
/tmp /tmp
/babel.config.js
/ecosystem.config.js /ecosystem.config.js
/extra/healthcheck.exe /extra/healthcheck.exe
/extra/healthcheck /extra/healthcheck
extra/exe-builder /extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push
# Comment the following line if you want to rebuild the healthcheck binary
/extra/healthcheck-armv7
### .gitignore content (commented rules are duplicated) ### .gitignore content (commented rules are duplicated)

@ -1,6 +1,7 @@
module.exports = { module.exports = {
ignorePatterns: [ ignorePatterns: [
"test/*", "test/*.js",
"test/cypress",
"server/modules/apicache/*", "server/modules/apicache/*",
"src/util.js" "src/util.js"
], ],
@ -14,13 +15,18 @@ module.exports = {
extends: [ extends: [
"eslint:recommended", "eslint:recommended",
"plugin:vue/vue3-recommended", "plugin:vue/vue3-recommended",
"plugin:jsdoc/recommended-error",
], ],
parser: "vue-eslint-parser", parser: "vue-eslint-parser",
parserOptions: { parserOptions: {
parser: "@babel/eslint-parser", parser: "@typescript-eslint/parser",
sourceType: "module", sourceType: "module",
requireConfigFile: false, requireConfigFile: false,
}, },
plugins: [
"jsdoc",
"@typescript-eslint",
],
rules: { rules: {
"yoda": "error", "yoda": "error",
eqeqeq: [ "warn", "smart" ], eqeqeq: [ "warn", "smart" ],
@ -71,14 +77,14 @@ module.exports = {
"no-var": "error", "no-var": "error",
"key-spacing": "warn", "key-spacing": "warn",
"keyword-spacing": "warn", "keyword-spacing": "warn",
"space-infix-ops": "warn", "space-infix-ops": "error",
"arrow-spacing": "warn", "arrow-spacing": "warn",
"no-trailing-spaces": "error", "no-trailing-spaces": "error",
"no-constant-condition": [ "error", { "no-constant-condition": [ "error", {
"checkLoops": false, "checkLoops": false,
}], }],
"space-before-blocks": "warn", "space-before-blocks": "warn",
//'no-console': 'warn', //"no-console": "warn",
"no-extra-boolean-cast": "off", "no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", { "no-multiple-empty-lines": [ "warn", {
"max": 1, "max": 1,
@ -90,14 +96,51 @@ module.exports = {
"no-unneeded-ternary": "error", "no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ], "array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ], "eol-last": [ "error", "always" ],
//'prefer-template': 'error', //"prefer-template": "error",
"template-curly-spacing": [ "warn", "never" ],
"comma-dangle": [ "warn", "only-multiline" ], "comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", { "no-empty": [ "error", {
"allowEmptyCatch": true "allowEmptyCatch": true
}], }],
"no-control-regex": "off", "no-control-regex": "off",
"one-var": [ "error", "never" ], "one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }] "max-statements-per-line": [ "error", { "max": 1 }],
"jsdoc/check-tag-names": [
"error",
{
"definedTags": [ "link" ]
}
],
"jsdoc/no-undefined-types": "off",
"jsdoc/no-defaults": [
"error",
{ "noOptionalParamNames": true }
],
"jsdoc/require-throws": "warn",
"jsdoc/require-jsdoc": [
"error",
{
"require": {
"FunctionDeclaration": true,
"MethodDefinition": true,
}
}
],
"jsdoc/no-blank-block-descriptions": "error",
"jsdoc/require-returns-description": "warn",
"jsdoc/require-returns-check": [
"error",
{ "reportMissingReturnForUndefinedTypes": false }
],
"jsdoc/require-returns": [
"warn",
{
"forceRequireReturn": true,
"forceReturnsWithAsync": true
}
],
"jsdoc/require-param-type": "warn",
"jsdoc/require-param-description": "warn"
}, },
"overrides": [ "overrides": [
{ {
@ -107,21 +150,20 @@ module.exports = {
} }
}, },
// Override for jest puppeteer // Override for TypeScript
{ {
"files": [ "files": [
"**/*.spec.js", "**/*.ts",
"**/*.spec.jsx"
], ],
env: { extends: [
jest: true, "plugin:@typescript-eslint/recommended",
}, ],
globals: { "rules": {
page: true, "jsdoc/require-returns-type": "off",
browser: true, "jsdoc/require-param-type": "off",
context: true, "@typescript-eslint/no-explicit-any": "off",
jestPuppeteer: true, "prefer-const": "off",
}, }
} }
] ]
}; };
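The `jsdoc/*` rules added above require JSDoc comments on function declarations and class methods, including typed, described parameters and an explicit `@returns` tag (even for async functions). A minimal sketch of a comment that satisfies these rules (the function and names are hypothetical, not part of this commit):

```js
/**
 * Check whether a monitor is currently paused.
 * @param {number} monitorID ID of the monitor to check
 * @returns {Promise<boolean>} True if the monitor is paused
 */
async function isMonitorPaused(monitorID) {
    // Hypothetical implementation; only the JSDoc shape matters here
    return false;
}
```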

@ -6,7 +6,7 @@ body:
- type: checkboxes - type: checkboxes
id: no-duplicate-issues id: no-duplicate-issues
attributes: attributes:
label: "⚠️ Please verify that this bug has NOT been raised before." label: "⚠️ Please verify that this question has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)" description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options: options:
- label: "I checked and didn't find similar issue" - label: "I checked and didn't find similar issue"
@ -24,8 +24,14 @@ body:
required: true required: true
attributes: attributes:
label: "📝 Describe your problem" label: "📝 Describe your problem"
description: "Please walk us through it step by step." description: "Please walk us through it step by step. Include all important details and add screenshots where appropriate"
placeholder: "Describe what are you asking for..." placeholder: "Describe what are you asking for..."
- type: textarea
id: error-msg
validations:
required: false
attributes:
label: "📝 Error Message(s) or Log"
- type: input - type: input
id: uptime-kuma-version id: uptime-kuma-version
attributes: attributes:
@ -38,7 +44,7 @@ body:
id: operating-system id: operating-system
attributes: attributes:
label: "💻 Operating System and Arch" label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?" description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x86" placeholder: "Ex. Ubuntu 20.04 x86"
validations: validations:
required: true required: true
@ -46,23 +52,24 @@ body:
id: browser-vendor id: browser-vendor
attributes: attributes:
label: "🌐 Browser" label: "🌐 Browser"
description: "Which browser are you running on?" description: "Which browser are you running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Google Chrome 95.0.4638.69" placeholder: "Ex. Google Chrome 95.0.4638.69"
validations: validations:
required: true required: true
- type: input - type: textarea
id: docker-version id: deployment-info
attributes:
label: "🐋 Docker Version"
description: "If running with Docker, which version are you running?"
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
validations:
required: false
- type: input
id: nodejs-version
attributes: attributes:
label: "🟩 NodeJS Version" label: "🖥️ Deployment Environment"
description: "If running with Node.js? which version are you running?" description: |
placeholder: "Ex. 14.18.0" examples:
- **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
- **Database**: sqlite/embedded mariadb/external mariadb
- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
- **number of monitors**: 42
value: |
- Runtime:
- Database:
- Filesystem used to store the database on:
- number of monitors:
validations: validations:
required: false required: true

@ -3,14 +3,14 @@ description: "Submit a bug report to help us improve"
#title: "[Bug] " #title: "[Bug] "
labels: [bug] labels: [bug]
body: body:
- type: checkboxes - type: textarea
id: no-duplicate-issues id: related-issues
validations:
required: true
attributes: attributes:
label: "⚠️ Please verify that this bug has NOT been raised before." label: "📑 I have found these related issues/pull requests"
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)" description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
options: placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
- label: "I checked and didn't find similar issue"
required: true
- type: checkboxes - type: checkboxes
attributes: attributes:
label: "🛡️ Security Policy" label: "🛡️ Security Policy"
@ -31,7 +31,7 @@ body:
required: true required: true
attributes: attributes:
label: "👟 Reproduction steps" label: "👟 Reproduction steps"
description: "How do you trigger this bug? Please walk us through it step by step." description: "How do you trigger this bug? Please walk us through it step by step. Include all important details and add screenshots where appropriate"
placeholder: "..." placeholder: "..."
- type: textarea - type: textarea
id: expected-behavior id: expected-behavior
@ -61,8 +61,8 @@ body:
id: operating-system id: operating-system
attributes: attributes:
label: "💻 Operating System and Arch" label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on?" description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x86" placeholder: "Ex. Ubuntu 20.04 x64 "
validations: validations:
required: true required: true
- type: input - type: input
@ -73,22 +73,23 @@ body:
placeholder: "Ex. Google Chrome 95.0.4638.69" placeholder: "Ex. Google Chrome 95.0.4638.69"
validations: validations:
required: true required: true
- type: input - type: textarea
id: docker-version id: deployment-info
attributes:
label: "🐋 Docker Version"
description: "If running with Docker, which version are you running?"
placeholder: "Ex. Docker 20.10.9 / K8S / Podman"
validations:
required: false
- type: input
id: nodejs-version
attributes: attributes:
label: "🟩 NodeJS Version" label: "🖥️ Deployment Environment"
description: "If running with Node.js? which version are you running?" description: |
placeholder: "Ex. 14.18.0" examples:
- **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
- **Database**: sqlite/embedded mariadb/external mariadb
- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
- **number of monitors**: 42
value: |
- Runtime:
- Database:
- Filesystem used to store the database on:
- number of monitors:
validations: validations:
required: false required: true
- type: textarea - type: textarea
id: logs id: logs
attributes: attributes:

@ -3,14 +3,14 @@ description: "Submit a proposal for a new feature"
#title: "[Feature] " #title: "[Feature] "
labels: [feature-request] labels: [feature-request]
body: body:
- type: checkboxes - type: textarea
id: no-duplicate-issues id: related-issues
validations:
required: true
attributes: attributes:
label: "⚠️ Please verify that this feature request has NOT been suggested before." label: "📑 I have found these related issues/pull requests"
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)" description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them or explain that you are unable to find any related issues"
options: placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
- label: "I checked and didn't find similar feature request"
required: true
- type: dropdown - type: dropdown
id: feature-area id: feature-area
attributes: attributes:
@ -18,10 +18,17 @@ body:
description: "What kind of feature request is this?" description: "What kind of feature request is this?"
multiple: true multiple: true
options: options:
- API - API / automation options
- New Notification - New notification-provider
- New Monitor - Change to existing notification-provider
- UI Feature - New monitor
- Change to existing monitor
- Dashboard
- Status-page
- Maintenance
- Deployment
- Certificate expiry
- Settings
- Other - Other
validations: validations:
required: true required: true

@ -12,8 +12,6 @@ labels:
DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new. DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new.
Why need this issue? It is because GitHub Advisory do not send a notification to @louislam, it is a workaround to do so. Why need this issue? It is because GitHub Advisory do not send a notification to @louislam, it is a workaround to do so.
Your GitHub Advisory URL: Your GitHub Advisory URL:

@ -1,7 +1,7 @@
⚠️⚠️⚠️ Since we do not accept all types of pull requests and do not want to waste your time. Please be sure that you have read pull request rules: ⚠️⚠️⚠️ Since we do not accept all types of pull requests and do not want to waste your time. Please be sure that you have read pull request rules:
https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
Tick the checkbox if you understand [x]: Tick the checkbox if you understand [x]:
- [ ] I have read and understand the pull request rules. - [ ] I have read and understand the pull request rules.
# Description # Description
@ -15,7 +15,7 @@ Please delete any options that are not relevant.
- Bug fix (non-breaking change which fixes an issue) - Bug fix (non-breaking change which fixes an issue)
- User interface (UI) - User interface (UI)
- New feature (non-breaking change which adds functionality) - New feature (non-breaking change which adds functionality)
- Breaking change (fix or feature that would cause existing functionality to not work as expected) - Breaking change (a fix or feature that would cause existing functionality to not work as expected)
- Other - Other
- This change requires a documentation update - This change requires a documentation update
@ -24,9 +24,8 @@ Please delete any options that are not relevant.
- [ ] My code follows the style guidelines of this project - [ ] My code follows the style guidelines of this project
- [ ] I ran ESLint and other linters for modified files - [ ] I ran ESLint and other linters for modified files
- [ ] I have performed a self-review of my own code and tested it - [ ] I have performed a self-review of my own code and tested it
- [ ] I have commented my code, particularly in hard-to-understand areas - [ ] I have commented my code, particularly in hard-to-understand areas (including JSDoc for methods)
(including JSDoc for methods) - [ ] My changes generate no new warnings
- [ ] My changes generate no new warnings
- [ ] My code needed automated testing. I have added them (this is optional task) - [ ] My code needed automated testing. I have added them (this is optional task)
## Screenshots (if any) ## Screenshots (if any)

@ -0,0 +1 @@
# This is a .gitignore style file for 'GrantBirki/json-yaml-validate' Action workflow

@ -1,89 +1,95 @@
# This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Auto Test name: Auto Test
on: on:
push: push:
branches: [ master ] branches: [ master, 1.23.X ]
paths-ignore: paths-ignore:
- '*.md' - '*.md'
pull_request: pull_request:
branches: [ master ] branches: [ master, 1.23.X ]
paths-ignore: paths-ignore:
- '*.md' - '*.md'
jobs: jobs:
auto-test: auto-test:
needs: [ check-linters ] needs: [ check-linters, e2e-test ]
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
timeout-minutes: 15 timeout-minutes: 15
strategy: strategy:
matrix: matrix:
os: [macos-latest, ubuntu-latest, windows-latest] os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
node: [ 14, 16, 18, 19 ] node: [ 14, 20.5 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/ # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node }} - name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: ${{ matrix.node }} node-version: ${{ matrix.node }}
cache: 'npm' - run: npm install npm@9 -g
- run: npm install - run: npm install
- run: npm run build - run: npm run build
- run: npm test - run: npm run test-backend
env: env:
HEADLESS_TEST: 1 HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }} JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
check-linters: # As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
runs-on: ubuntu-latest armv7-simple-test:
needs: [ check-linters ]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [ ARMv7 ]
node: [ 14, 20 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Use Node.js 14 - name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: 14 node-version: ${{ matrix.node }}
cache: 'npm' - run: npm install npm@9 -g
- run: npm install - run: npm ci --production
- run: npm run lint
e2e-tests: check-linters:
needs: [ check-linters ]
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Use Node.js 14 - name: Use Node.js 20
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: 14 node-version: 20
cache: 'npm'
- run: npm install - run: npm install
- run: npm run build - run: npm run lint:prod
- run: npm run cy:test
frontend-unit-tests: e2e-test:
needs: [ check-linters ] needs: [ check-linters ]
runs-on: ubuntu-latest runs-on: ARM64
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Use Node.js 14 - name: Use Node.js 20
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: 14 node-version: 20
cache: 'npm'
- run: npm install - run: npm install
- run: npx playwright install
- run: npm run build - run: npm run build
- run: npm run cy:run:unit - run: npm run test-e2e

@ -14,10 +14,10 @@ jobs:
node-version: [16] node-version: [16]
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Use Node.js ${{ matrix.node-version }} - name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3 uses: actions/setup-node@v4
with: with:
node-version: ${{ matrix.node-version }} node-version: ${{ matrix.node-version }}
cache: 'npm' cache: 'npm'

@ -0,0 +1,43 @@
name: "CodeQL"
on:
push:
branches: [ "master", "1.23.X"]
pull_request:
branches: [ "master", "1.23.X"]
schedule:
- cron: '16 22 * * 0'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
timeout-minutes: 360
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'go', 'javascript-typescript' ]
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"

@ -0,0 +1,27 @@
name: json-yaml-validate
on:
push:
branches:
- master
pull_request:
branches:
- master
- 1.23.X
workflow_dispatch:
permissions:
contents: read
pull-requests: write # enable write permissions for pull request comments
jobs:
json-yaml-validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: json-yaml-validate
id: json-yaml-validate
uses: GrantBirki/json-yaml-validate@v2.4.0
with:
comment: "true" # enable comment mode
exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

@ -0,0 +1,17 @@
name: prevent-file-change
on:
pull_request:
jobs:
check-file-changes:
runs-on: ubuntu-latest
steps:
- name: Prevent file change
uses: xalvarez/prevent-file-change-action@v1
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
# Regex, /src/lang/*.json is not allowed to be changed, except for /src/lang/en.json
pattern: '^(?!src/lang/en\.json$)src/lang/.*\.json$'
trustedAuthors: UptimeKumaBot

@ -1,4 +1,4 @@
name: 'Automatically close stale issues and PRs' name: 'Automatically close stale issues'
on: on:
workflow_dispatch: workflow_dispatch:
schedule: schedule:
@ -9,14 +9,14 @@ jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/stale@v7 - uses: actions/stale@v8
with: with:
stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.' stale-issue-message: 'We are clearing up our old issues and your ticket has been open for 3 months with no activity. Remove stale label or comment or this will be closed in 2 days.'
close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.' close-issue-message: 'This issue was closed because it has been stalled for 2 days with no activity.'
days-before-stale: 90 days-before-stale: 90
days-before-close: 2 days-before-close: 2
days-before-pr-stale: 999999999 days-before-pr-stale: -1
days-before-pr-close: 1 days-before-pr-close: -1
exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request' exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
exempt-issue-assignees: 'louislam' exempt-issue-assignees: 'louislam'
operations-per-run: 200 operations-per-run: 200

.gitignore

@ -7,6 +7,7 @@ dist-ssr
/data /data
!/data/.gitkeep !/data/.gitkeep
/data*
.vscode .vscode
/private /private
@ -14,12 +15,12 @@ dist-ssr
/tmp /tmp
.env .env
cypress/videos
cypress/screenshots
/extra/healthcheck.exe /extra/healthcheck.exe
/extra/healthcheck /extra/healthcheck
/extra/healthcheck-armv7 /extra/healthcheck-armv7
extra/exe-builder/bin extra/exe-builder/bin
extra/exe-builder/obj extra/exe-builder/obj
.vs
.vscode

@ -10,5 +10,7 @@
"color-function-notation": "legacy", "color-function-notation": "legacy",
"shorthand-property-no-redundant-values": null, "shorthand-property-no-redundant-values": null,
"color-hex-length": null, "color-hex-length": null,
"declaration-block-no-redundant-longhand-properties": null,
"at-rule-no-unknown": null
} }
} }

@ -1,14 +1,14 @@
# Project Info # Project Info
First of all, I want to thank everyone who made pull requests for Uptime Kuma. I never thought the GitHub Community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that. First of all, I want to thank everyone who have made pull requests for Uptime Kuma. I never thought the GitHub community would be so nice! Because of this, I also never thought that other people would actually read and edit my code. It is not very well structured or commented, sorry about that.
The project was created with vite.js (vue3). Then I created a subdirectory called "server" for server part. Both frontend and backend share the same package.json. The project was created with vite.js (vue3). Then I created a subdirectory called "server" for the server part. Both frontend and backend share the same `package.json`.
The frontend code build into "dist" directory. The server (express.js) exposes the "dist" directory as root of the endpoint. This is how production is working. The frontend code builds into "dist" directory. The server (express.js) exposes the "dist" directory as the root of the endpoint. This is how production is working.
## Key Technical Skills ## Key Technical Skills
- Node.js (You should know what are promise, async/await and arrow function etc.) - Node.js (You should know about promises, async/await, arrow functions, etc.)
- Socket.io - Socket.io
- SCSS - SCSS
- Vue.js - Vue.js
@ -30,64 +30,65 @@ The frontend code build into "dist" directory. The server (express.js) exposes t
## Can I create a pull request for Uptime Kuma? ## Can I create a pull request for Uptime Kuma?
Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. Especially for a large pull request or you don't know it will be merged or not. Yes or no, it depends on what you will try to do. Since I don't want to waste your time, be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**. This is especially important for a large pull request, or if you don't know whether it will be merged or not.
Here are some references: Here are some references:
✅ Usually Accept: ### ✅ Usually accepted
- Bug fix - Bug fix
- Security fix - Security fix
- Adding notification providers - Adding notification providers
- Adding new language files (You should go to https://weblate.kuma.pet for existing languages) - Adding new language files (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Adding new language keys: `$t("...")` - Adding new language keys: `$t("...")`
⚠️ Discussion First ### ⚠️ Discussion required
- Large pull requests - Large pull requests
- New features - New features
❌ Won't Merge ### ❌ Won't be merged
- A dedicated pr for translating existing languages (You can now translate on https://weblate.kuma.pet)
- Do not pass auto test - A dedicated PR for translating existing languages (see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md))
- Do not pass the auto-test
- Any breaking changes - Any breaking changes
- Duplicated pull request - Duplicated pull requests
- Buggy - Buggy
- UI/UX is not close to Uptime Kuma - UI/UX is not close to Uptime Kuma
- Existing logic is completely modified or deleted for no reason - Modifications or deletions of existing logic without a valid reason.
- A function that is completely out of scope - Adding functions that are completely out of scope
- Convert existing code into other programming languages - Converting existing code into other programming languages
- Unnecessary large code changes (Hard to review, causes code conflicts to other pull requests) - Unnecessarily large code changes that are hard to review and cause conflicts with other PRs.
The above cases cannot cover all situations. The above cases may not cover all possible situations.
I (@louislam) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spend on it. Therefore, it is essential to have a discussion beforehand. I ([@louislam](https://github.com/louislam)) have the final say. If your pull request does not meet my expectations, I will reject it, no matter how much time you spent on it. Therefore, it is essential to have a discussion beforehand.
I will mark your pull request in the [milestones](https://github.com/louislam/uptime-kuma/milestones), if I am plan to review and merge it. I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.
Also, please don't rush or ask for ETA, because I have to understand the pull request, make sure it is no breaking changes and stick to my vision of this project, especially for large pull requests.
Also, please don't rush or ask for an ETA, because I have to understand the pull request, make sure it has no breaking changes, and make sure it sticks to my vision of this project, especially for large pull requests.
### Recommended Pull Request Guideline ### Recommended Pull Request Guideline
Before deep into coding, discussion first is preferred. Creating an empty pull request for discussion would be recommended. Before deep into coding, discussion first is preferred. Creating an empty pull request for discussion would be recommended.
1. Fork the project 1. Fork the project
1. Clone your fork repo to local 2. Clone your fork repo to local
1. Create a new branch 3. Create a new branch
1. Create an empty commit 4. Create an empty commit: `git commit -m "<YOUR TASK NAME>" --allow-empty`
`git commit -m "[empty commit] pull request for <YOUR TASK NAME>" --allow-empty` 5. Push to your fork repo
1. Push to your fork repo 6. Prepare a pull request: https://github.com/louislam/uptime-kuma/compare
1. Create a pull request: https://github.com/louislam/uptime-kuma/compare 7. Write a proper description. You can mention @louislam in it, so @louislam will get the notification.
1. Write a proper description 8. Create your pull request as a Draft
1. Click "Change to draft" 9. Wait for the discussion
1. Discussion
## Project Styles ## Project Styles
I personally do not like something that requires so many configurations before you can finally start the app. I hope Uptime Kuma installation could be as easy as like installing a mobile app. I personally do not like something that requires so many configurations before you can finally start the app. I hope the Uptime Kuma installation will be as easy as installing a mobile app.
- Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, no extra effort required to get it running - Easy to install for non-Docker users, no native build dependency is needed (for x86_64/armv7/arm64), no extra config, and no extra effort required to get it running
- Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go - Single container for Docker users, no very complex docker-compose file. Just map the volume and expose the port, then good to go
- Settings should be configurable in the frontend. Environment variable is not encouraged, unless it is related to startup such as `DATA_DIR` - Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use - Easy to use
- The web UI styling should be consistent and nice - The web UI styling should be consistent and nice
@ -106,11 +107,23 @@ I personally do not like something that requires so many configurations before y
## Tools ## Tools
- Node.js >= 14 - [`Node.js`](https://nodejs.org/) >= 14
- NPM >= 8.5 - [`npm`](https://www.npmjs.com/) >= 8.5
- Git - [`git`](https://git-scm.com/)
- IDE that supports ESLint and EditorConfig (I am using IntelliJ IDEA) - IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
- A SQLite GUI tool (SQLite Expert Personal is suggested) - A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
### GitHub Codespaces
If you don't want to set up a local environment, you can now develop on GitHub Codespaces; read more:
https://github.com/louislam/uptime-kuma/tree/master/.devcontainer
## Git Branches
- `master`: 2.X.X development. If you want to add a new feature, your pull request should be based on this.
- `1.23.X`: 1.23.X development. If you want to fix a bug for v1 and v2, your pull request should be based on this.
- All other branches are unused, outdated or for dev.
## Install Dependencies for Development ## Install Dependencies for Development
@ -130,8 +143,9 @@ Port `3000` and port `3001` will be used.
npm run dev npm run dev
``` ```
But sometimes, you would like to keep restart the server, but not the frontend, you can run these command in two terminals: But sometimes, you may want to restart only the server and not the frontend; you can run these commands in two terminals:
```
```bash
npm run start-frontend-dev npm run start-frontend-dev
npm run start-server-dev npm run start-server-dev
``` ```
@ -140,19 +154,18 @@ npm run start-server-dev
It binds to `0.0.0.0:3001` by default. It binds to `0.0.0.0:3001` by default.
It is mainly a socket.io app + express.js. It is mainly a socket.io app + express.js.
express.js is used for: express.js is used for:
- entry point such as redirecting to a status page or the dashboard - entry point such as redirecting to a status page or the dashboard
- serving the frontend built files (index.html, .js and .css etc.) - serving the frontend built files (index.html, .js and .css etc.)
- serving internal APIs of status page - serving internal APIs of the status page
### Structure in /server/ ### Structure in /server/
- jobs/ (Jobs that are running in another process) - jobs/ (Jobs that are running in another process)
- model/ (Object model, auto mapping to the database table name) - model/ (Object model, auto-mapping to the database table name)
- modules/ (Modified 3rd-party modules) - modules/ (Modified 3rd-party modules)
- monitor_types (Monitor Types) - monitor_types (Monitor Types)
- notification-providers/ (individual notification logic) - notification-providers/ (individual notification logic)
@ -163,9 +176,9 @@ express.js is used for:
## Frontend Dev Server ## Frontend Dev Server
It binds to `0.0.0.0:3000` by default. Frontend dev server is used for development only. It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.
For production, it is not used. It will be compiled to `dist` directory instead. For production, it is not used. It will be compiled to `dist` directory instead.
You can use Vue.js devtools Chrome extension for debugging. You can use Vue.js devtools Chrome extension for debugging.
@ -181,14 +194,13 @@ Uptime Kuma Frontend is a single page application (SPA). Most paths are handled
The router is in `src/router.js` The router is in `src/router.js`
As you can see, most data in frontend is stored in root level, even though you changed the current router to any other pages. As you can see, most data in the frontend is stored at the root level, even though you changed the current router to any other pages.
The data and socket logic are in `src/mixins/socket.js`. The data and socket logic are in `src/mixins/socket.js`.
## Database Migration ## Database Migration
1. Create `patch-{name}.sql` in `./db/` See: https://github.com/louislam/uptime-kuma/tree/master/db/knex_migrations
2. Add your patch filename in the `patchList` list in `./server/database.js`
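For orientation, a Knex migration in `./db/knex_migrations` generally exports `up` and `down` functions. A minimal sketch (the file name, table and column here are hypothetical; see the README in that directory for the actual conventions):

```js
// db/knex_migrations/2023-10-01-0000-example.js (hypothetical file name)
exports.up = function (knex) {
    // Add a hypothetical column to an existing table
    return knex.schema.alterTable("monitor", (table) => {
        table.string("example_field", 255).nullable();
    });
};

exports.down = function (knex) {
    // Revert the change made in up()
    return knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("example_field");
    });
};
```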
## Unit Test ## Unit Test
@ -210,22 +222,68 @@ Both frontend and backend share the same package.json. However, the frontend dep
### Update Dependencies ### Update Dependencies
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update patch release version only. Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update the patch release version only.
Patch release = the third digit ([Semantic Versioning](https://semver.org/)) Patch release = the third digit ([Semantic Versioning](https://semver.org/))
If for maybe security reasons, a library must be updated. Then you must need to check if there are any breaking changes. If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.
## Translations ## Translations
Please read: https://github.com/louislam/uptime-kuma/tree/master/src/languages Please add **all** the strings which are translatable to `src/lang/en.json` (if translation keys are omitted, they can not be translated.)
**Don't include any other languages in your initial pull request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
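For example (the key name is hypothetical): add `"myFeatureLabel": "My Feature"` to `src/lang/en.json` and reference it in components with `$t("myFeatureLabel")` instead of hard-coding the English string.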
## Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.
## Wiki ## Wiki
Since there is no way to make a pull request to wiki's repo, I have set up another repo to do that. Since there is no way to make a pull request to the wiki, I have set up another repo to do that.
https://github.com/louislam/uptime-kuma-wiki https://github.com/louislam/uptime-kuma-wiki
## Docker
### Arch
- amd64
- arm64
- armv7
### Docker Tags
#### v2
- `2`, `latest-2`: v2 with full features such as Chromium and bundled MariaDB
- `2.x.x`
- `2-slim`: v2 with basic features
- `2.x.x-slim`
- `beta2`: Latest beta build
- `2.x.x-beta.x`
- `nightly2`: Dev build
- `base2`: Basic Debian setup without Uptime Kuma source code (Full features)
- `base2-slim`: Basic Debian setup without Uptime Kuma source code
- `pr-test2`: For testing pull request without setting up a local environment
#### v1
- `1`, `latest`, `1-debian`, `debian`: Latest version of v1
- `1.x.x`, `1.x.x-debian`
- `1.x.x-beta.x`: Beta build
- `beta`: Latest beta build
- `nightly`: Dev build
- `base-debian`: Basic Debian setup without Uptime Kuma source code
- `pr-test`: For testing pull request without setting up a local environment
- `base-alpine`: (Deprecated) Basic Alpine setup without Uptime Kuma source code
- `1-alpine`, `alpine`: (Deprecated)
- `1.x.x-alpine`: (Deprecated)
## Maintainer ## Maintainer
Check the latest issues and pull requests: Check the latest issues and pull requests:
@ -236,12 +294,12 @@ https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
1. Draft a release note 1. Draft a release note
2. Make sure the repo is cleared 2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go` 3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
3. `npm run release-final with env vars: `VERSION` and `GITHUB_TOKEN` 4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue` 5. Wait until the `Press any key to continue`
5. `git push` 6. `git push`
6. Publish the release note as 1.X.X 7. Publish the release note as 1.X.X
7. Press any key to continue 8. Press any key to continue
8. Deploy to the demo server: `npm run deploy-demo-server` 9. Deploy to the demo server: `npm run deploy-demo-server`
Checking: Checking:
@ -274,3 +332,11 @@ git remote add production https://github.com/louislam/uptime-kuma.wiki.git
git pull git pull
git push production master git push production master
``` ```
## Useful Commands
Change the base of a pull request such as `master` to `1.23.X`
```bash
git rebase --onto <new parent> <old parent>
```
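For example, assuming your feature branch is checked out and currently based on `master`, `git rebase --onto 1.23.X master` replays your commits on top of `1.23.X`.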

@ -1,39 +1,39 @@
# Uptime Kuma
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>
<div align="center" width="100%"> <div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="" /> <img src="./public/icon.svg" width="128" alt="" />
</div> </div>
# Uptime Kuma
Uptime Kuma is an easy-to-use self-hosted monitoring tool. Uptime Kuma is an easy-to-use self-hosted monitoring tool.
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>
<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" /> <img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
## 🥔 Live Demo ## 🥔 Live Demo
Try it! Try it!
- Tokyo Demo Server: https://demo.uptime.kuma.pet (Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors)) Demo Server (Location: Frankfurt - Germany): https://demo.kuma.pet/start-demo
It is a temporary live demo, all data will be deleted after 10 minutes. Use the one that is closer to you, but I suggest that you should install and try it out for the best demo experience. It is a temporary live demo, all data will be deleted after 10 minutes. Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors).
## ⭐ Features ## ⭐ Features
* Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / Ping / DNS Record / Push / Steam Game Server / Docker Containers - Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
* Fancy, Reactive, Fast UI/UX - Fancy, Reactive, Fast UI/UX
* Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications) - Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
* 20 second intervals - 20-second intervals
* [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang) - [Multi Languages](https://github.com/louislam/uptime-kuma/tree/master/src/lang)
* Multiple status pages - Multiple status pages
* Map status pages to specific domains - Map status pages to specific domains
* Ping chart - Ping chart
* Certificate info - Certificate info
* Proxy support - Proxy support
* 2FA support - 2FA support
## 🔧 How to Install ## 🔧 How to Install
@ -43,21 +43,27 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Use the
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1 docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
``` ```
⚠️ Please use a **local volume** only. Other types such as NFS are not supported.
Uptime Kuma is now running on http://localhost:3001 Uptime Kuma is now running on http://localhost:3001
> [!WARNING]
> File Systems like **NFS** (Network File System) are **NOT** supported. Please map to a local directory or volume.
### 💪🏻 Non-Docker ### 💪🏻 Non-Docker
Required Tools: Requirements:
- [Node.js](https://nodejs.org/en/download/) >= 14
- [npm](https://docs.npmjs.com/cli/) >= 7 - Platform
- [Git](https://git-scm.com/downloads) - ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
- ❌ Replit / Heroku
- [Node.js](https://nodejs.org/en/download/) 14 / 16 / 18 / 20.4
- [npm](https://docs.npmjs.com/cli/) 9
- [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background - [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
```bash ```bash
# Update your npm to the latest version # Update your npm
npm install npm -g npm install npm@9 -g
git clone https://github.com/louislam/uptime-kuma.git git clone https://github.com/louislam/uptime-kuma.git
cd uptime-kuma cd uptime-kuma
@ -66,15 +72,14 @@ npm run setup
# Option 1. Try it # Option 1. Try it
node server/server.js node server/server.js
# (Recommended) Option 2. Run in background using PM2 # (Recommended) Option 2. Run in the background using PM2
# Install PM2 if you don't have it: # Install PM2 if you don't have it:
npm install pm2 -g && pm2 install pm2-logrotate npm install pm2 -g && pm2 install pm2-logrotate
# Start Server # Start Server
pm2 start server/server.js --name uptime-kuma pm2 start server/server.js --name uptime-kuma
``` ```
Uptime Kuma is now running on http://localhost:3001 Uptime Kuma is now running on http://localhost:3001
More useful PM2 Commands More useful PM2 Commands
@ -101,14 +106,10 @@ https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next? ## 🆕 What's Next?
I will mark requests/issues to the next milestone. I will assign requests/issues to the next milestone.
https://github.com/louislam/uptime-kuma/milestones https://github.com/louislam/uptime-kuma/milestones
Project Plan:
https://github.com/users/louislam/projects/4/views/1
## ❤️ Sponsors ## ❤️ Sponsors
Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated) Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)
@ -135,28 +136,33 @@ Telegram Notification Sample:
## Motivation ## Motivation
* I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the close ones is statping. Unfortunately, it is not stable and no longer maintained. - I was looking for a self-hosted monitoring tool like "Uptime Robot", but it is hard to find a suitable one. One of the closest ones is statping. Unfortunately, it is not stable and no longer maintained.
* Want to build a fancy UI. - Wanted to build a fancy UI.
* Learn Vue 3 and vite.js. - Learn Vue 3 and vite.js.
* Show the power of Bootstrap 5. - Show the power of Bootstrap 5.
* Try to use WebSocket with SPA instead of REST API. - Try to use WebSocket with SPA instead of a REST API.
* Deploy my first Docker image to Docker Hub. - Deploy my first Docker image to Docker Hub.
If you love this project, please consider giving it a ⭐.
If you love this project, please consider giving me a ⭐. ## 🗣️ Discussion / Ask for Help
## 🗣️ Discussion ⚠️ For any general or technical questions, please don't send me an email, as I am unable to provide support in that manner; I will not respond to questions asked via email.
### Issues Page I recommend using Google, GitHub Issues, or Uptime Kuma's subreddit for finding answers to your question. If you cannot find the information you need, feel free to ask:
You can discuss or ask for help in [issues](https://github.com/louislam/uptime-kuma/issues). - [GitHub Issues](https://github.com/louislam/uptime-kuma/issues)
- [Subreddit (r/UptimeKuma)](https://www.reddit.com/r/UptimeKuma/)
### Subreddit My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam)
You can mention me if you ask a question on the subreddit.
My Reddit account: [u/louislamlam](https://reddit.com/u/louislamlam). ## Contributions
You can mention me if you ask a question on Reddit.
[r/Uptime kuma](https://www.reddit.com/r/UptimeKuma/)
## Contribute ### Create Pull Requests
We DO NOT accept all types of pull requests and do not want to waste your time. Please be sure that you have read and follow pull request rules:
[CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma)
### Test Pull Requests ### Test Pull Requests
@ -170,12 +176,16 @@ https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests
Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases
### Bug Reports / Feature Requests ### Bug Reports / Feature Requests
If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues). If you want to report a bug or request a new feature, feel free to open a [new issue](https://github.com/louislam/uptime-kuma/issues).
### Translations ### Translations
If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md). If you want to translate Uptime Kuma into your language, please visit [Weblate Readme](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
Feel free to correct my grammar in this README, source code, or wiki, as my mother language is not English and my grammar is not that great. ### Spelling & Grammar
Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great.
### Create Pull Requests
If you want to modify Uptime Kuma, please read this guide and follow the rules here: https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md

@ -3,28 +3,28 @@
## Reporting a Vulnerability ## Reporting a Vulnerability
1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new. 1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
1. Please also create a empty security issues for alerting me, as GitHub Advisory do not send a notification, I probably will miss without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md 2. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification; I would probably miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
Do not use the public issue tracker or discuss it in the public as it will cause more damage. Do not use the public issue tracker or discuss it in public as it will cause more damage.
## Do you accept other 3rd-party bug bounty platforms? ## Do you accept other 3rd-party bug bounty platforms?
At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone have tried to send a phishing link to me by this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails. At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone has tried to send a phishing link to me by doing this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
## Supported Versions ## Supported Versions
### Uptime Kuma Versions ### Uptime Kuma Versions
You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the lastest version. You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
### Upgradable Docker Tags ### Upgradable Docker Tags
| Tag | Supported | | Tag | Supported |
| ------- | ------------------ | |-|-|
| 1 | :white_check_mark: | | 1 | :white_check_mark: |
| 1-debian | :white_check_mark: | | 1-debian | :white_check_mark: |
| latest | :white_check_mark: | | latest | :white_check_mark: |
| debian | :white_check_mark: | | debian | :white_check_mark: |
| 1-alpine | ⚠️ Deprecated | | 1-alpine | ⚠️ Deprecated |
| alpine | ⚠️ Deprecated | | alpine | ⚠️ Deprecated |
| All other tags | ❌ | | All other tags | ❌ |

@ -1,11 +0,0 @@
const config = {};
if (process.env.TEST_FRONTEND) {
config.presets = [ "@babel/preset-env" ];
}
if (process.env.TEST_BACKEND) {
config.plugins = [ "babel-plugin-rewire" ];
}
module.exports = config;

@ -0,0 +1,9 @@
services:
uptime-kuma:
image: louislam/uptime-kuma:1
volumes:
- ./data:/app/data
ports:
# <Host Port>:<Container Port>
- 3001:3001
restart: unless-stopped
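Assuming the file above is saved as `compose.yaml` (or `docker-compose.yml`), the stack can typically be started with `docker compose up -d` and stopped with `docker compose down`.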

@ -0,0 +1,60 @@
import { defineConfig, devices } from "@playwright/test";
const port = 30001;
const url = `http://localhost:${port}`;
export default defineConfig({
// Look for test files in the "tests" directory, relative to this configuration file.
testDir: "../test/e2e",
outputDir: "../private/playwright-test-results",
fullyParallel: false,
locale: "en-US",
// Fail the build on CI if you accidentally left test.only in the source code.
forbidOnly: !!process.env.CI,
// Retry on CI only.
retries: process.env.CI ? 2 : 0,
// Opt out of parallel tests on CI.
workers: 1,
// Reporter to use
reporter: [
[
"html", {
outputFolder: "../private/playwright-report",
open: "never",
}
],
],
use: {
// Base URL to use in actions like `await page.goto('/')`.
baseURL: url,
// Collect trace when retrying the failed test.
trace: "on-first-retry",
},
// Configure projects for major browsers.
projects: [
{
name: "chromium",
use: { ...devices["Desktop Chrome"] },
},
/*
{
name: "firefox",
use: { browserName: "firefox" }
},*/
],
// Run your local dev server before starting the tests.
webServer: {
command: `node extra/remove-playwright-test-data.js && node server/server.js --port=${port} --data-dir=./data/playwright-test`,
url,
reuseExistingServer: false,
cwd: "../",
},
});

@ -1,4 +1,3 @@
import legacy from "@vitejs/plugin-legacy";
import vue from "@vitejs/plugin-vue"; import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite"; import { defineConfig } from "vite";
import visualizer from "rollup-plugin-visualizer"; import visualizer from "rollup-plugin-visualizer";
@ -16,12 +15,12 @@ export default defineConfig({
}, },
define: { define: {
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version), "FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
"DEVCONTAINER": JSON.stringify(process.env.DEVCONTAINER),
"GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN": JSON.stringify(process.env.GITHUB_CODESPACES_PORT_FORWARDING_DOMAIN),
"CODESPACE_NAME": JSON.stringify(process.env.CODESPACE_NAME),
}, },
plugins: [ plugins: [
vue(), vue(),
legacy({
targets: [ "since 2015" ],
}),
visualizer({ visualizer({
filename: "tmp/dist-stats.html" filename: "tmp/dist-stats.html"
}), }),
@ -42,6 +41,9 @@ export default defineConfig({
} }
}, },
build: { build: {
commonjsOptions: {
include: [ /.js$/ ],
},
rollupOptions: { rollupOptions: {
output: { output: {
manualChunks(id, { getModuleInfo, getModuleIds }) { manualChunks(id, { getModuleInfo, getModuleIds }) {

@ -0,0 +1,562 @@
const { R } = require("redbean-node");
const { log } = require("../src/util");
/**
* DO NOT ADD ANYTHING HERE!
* IF YOU NEED TO ADD FIELDS, ADD THEM TO ./db/knex_migrations
* See ./db/knex_migrations/README.md for more information
* @returns {Promise<void>}
*/
async function createTables() {
log.info("mariadb", "Creating basic tables for MariaDB");
const knex = R.knex;
// TODO: Should check later if it is really the final patch sql file.
// docker_host
await knex.schema.createTable("docker_host", (table) => {
table.increments("id");
table.integer("user_id").unsigned().notNullable();
table.string("docker_daemon", 255);
table.string("docker_type", 255);
table.string("name", 255);
});
// group
await knex.schema.createTable("group", (table) => {
table.increments("id");
table.string("name", 255).notNullable();
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.boolean("public").notNullable().defaultTo(false);
table.boolean("active").notNullable().defaultTo(true);
table.integer("weight").notNullable().defaultTo(1000);
table.integer("status_page_id").unsigned();
});
// proxy
await knex.schema.createTable("proxy", (table) => {
table.increments("id");
table.integer("user_id").unsigned().notNullable();
table.string("protocol", 10).notNullable();
table.string("host", 255).notNullable();
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
table.boolean("auth").notNullable();
table.string("username", 255).nullable();
table.string("password", 255).nullable();
table.boolean("active").notNullable().defaultTo(true);
table.boolean("default").notNullable().defaultTo(false);
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.index("user_id", "proxy_user_id");
});
// user
await knex.schema.createTable("user", (table) => {
table.increments("id");
table.string("username", 255).notNullable().unique().collate("utf8_general_ci");
table.string("password", 255);
table.boolean("active").notNullable().defaultTo(true);
table.string("timezone", 150);
table.string("twofa_secret", 64);
table.boolean("twofa_status").notNullable().defaultTo(false);
table.string("twofa_last_token", 6);
});
// monitor
await knex.schema.createTable("monitor", (table) => {
table.increments("id");
table.string("name", 150);
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("interval").notNullable().defaultTo(20);
table.text("url");
table.string("type", 20);
table.integer("weight").defaultTo(2000);
table.string("hostname", 255);
table.integer("port");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.string("keyword", 255);
table.integer("maxretries").notNullable().defaultTo(0);
table.boolean("ignore_tls").notNullable().defaultTo(false);
table.boolean("upside_down").notNullable().defaultTo(false);
table.integer("maxredirects").notNullable().defaultTo(10);
table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
table.string("dns_resolve_type", 5);
table.string("dns_resolve_server", 255);
table.string("dns_last_result", 255);
table.integer("retry_interval").notNullable().defaultTo(0);
table.string("push_token", 20).defaultTo(null);
table.text("method").notNullable().defaultTo("GET");
table.text("body").defaultTo(null);
table.text("headers").defaultTo(null);
table.text("basic_auth_user").defaultTo(null);
table.text("basic_auth_pass").defaultTo(null);
table.integer("docker_host").unsigned()
.references("id").inTable("docker_host");
table.string("docker_container", 255);
table.integer("proxy_id").unsigned()
.references("id").inTable("proxy");
table.boolean("expiry_notification").defaultTo(true);
table.text("mqtt_topic");
table.string("mqtt_success_message", 255);
table.string("mqtt_username", 255);
table.string("mqtt_password", 255);
table.string("database_connection_string", 2000);
table.text("database_query");
table.string("auth_method", 250);
table.text("auth_domain");
table.text("auth_workstation");
table.string("grpc_url", 255).defaultTo(null);
table.text("grpc_protobuf").defaultTo(null);
table.text("grpc_body").defaultTo(null);
table.text("grpc_metadata").defaultTo(null);
table.text("grpc_method").defaultTo(null);
table.text("grpc_service_name").defaultTo(null);
table.boolean("grpc_enable_tls").notNullable().defaultTo(false);
table.string("radius_username", 255);
table.string("radius_password", 255);
table.string("radius_calling_station_id", 50);
table.string("radius_called_station_id", 50);
table.string("radius_secret", 255);
table.integer("resend_interval").notNullable().defaultTo(0);
table.integer("packet_size").notNullable().defaultTo(56);
table.string("game", 255);
});
// heartbeat
await knex.schema.createTable("heartbeat", (table) => {
table.increments("id");
table.boolean("important").notNullable().defaultTo(false);
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.smallint("status").notNullable();
table.text("msg");
table.datetime("time").notNullable();
table.integer("ping");
table.integer("duration").notNullable().defaultTo(0);
table.integer("down_count").notNullable().defaultTo(0);
table.index("important");
table.index([ "monitor_id", "time" ], "monitor_time_index");
table.index("monitor_id");
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
});
// incident
await knex.schema.createTable("incident", (table) => {
table.increments("id");
table.string("title", 255).notNullable();
table.text("content", 255).notNullable();
table.string("style", 30).notNullable().defaultTo("warning");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.datetime("last_updated_date");
table.boolean("pin").notNullable().defaultTo(true);
table.boolean("active").notNullable().defaultTo(true);
table.integer("status_page_id").unsigned();
});
// maintenance
await knex.schema.createTable("maintenance", (table) => {
table.increments("id");
table.string("title", 150).notNullable();
table.text("description").notNullable();
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.boolean("active").notNullable().defaultTo(true);
table.string("strategy", 50).notNullable().defaultTo("single");
table.datetime("start_date");
table.datetime("end_date");
table.time("start_time");
table.time("end_time");
table.string("weekdays", 250).defaultTo("[]");
table.text("days_of_month").defaultTo("[]");
table.integer("interval_day");
table.index("active");
table.index([ "strategy", "active" ], "manual_active");
table.index("user_id", "maintenance_user_id");
});
// status_page
await knex.schema.createTable("status_page", (table) => {
table.increments("id");
table.string("slug", 255).notNullable().unique().collate("utf8_general_ci");
table.string("title", 255).notNullable();
table.text("description");
table.string("icon", 255).notNullable();
table.string("theme", 30).notNullable();
table.boolean("published").notNullable().defaultTo(true);
table.boolean("search_engine_index").notNullable().defaultTo(true);
table.boolean("show_tags").notNullable().defaultTo(false);
table.string("password");
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
table.datetime("modified_date").notNullable().defaultTo(knex.fn.now());
table.text("footer_text");
table.text("custom_css");
table.boolean("show_powered_by").notNullable().defaultTo(true);
table.string("google_analytics_tag_id");
});
// maintenance_status_page
await knex.schema.createTable("maintenance_status_page", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned().notNullable()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
});
// maintenance_timeslot
await knex.schema.createTable("maintenance_timeslot", (table) => {
table.increments("id");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.datetime("start_date").notNullable();
table.datetime("end_date");
table.boolean("generated_next").defaultTo(false);
table.index("maintenance_id");
table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
table.index("generated_next", "generated_next_index");
});
// monitor_group
await knex.schema.createTable("monitor_group", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("group_id").unsigned().notNullable()
.references("id").inTable("group")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("weight").notNullable().defaultTo(1000);
table.boolean("send_url").notNullable().defaultTo(false);
table.index([ "monitor_id", "group_id" ], "fk");
});
// monitor_maintenance
await knex.schema.createTable("monitor_maintenance", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index("maintenance_id", "maintenance_id_index2");
table.index("monitor_id", "monitor_id_index");
});
// notification
await knex.schema.createTable("notification", (table) => {
table.increments("id");
table.string("name", 255);
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned();
table.boolean("is_default").notNullable().defaultTo(false);
table.text("config", "longtext");
});
// monitor_notification
await knex.schema.createTable("monitor_notification", (table) => {
table.increments("id").unsigned(); // TODO: no auto increment????
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("notification_id").unsigned().notNullable()
.references("id").inTable("notification")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
});
// tag
await knex.schema.createTable("tag", (table) => {
table.increments("id");
table.string("name", 255).notNullable();
table.string("color", 255).notNullable();
table.datetime("created_date").notNullable().defaultTo(knex.fn.now());
});
// monitor_tag
await knex.schema.createTable("monitor_tag", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("tag_id").unsigned().notNullable()
.references("id").inTable("tag")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("value");
});
// monitor_tls_info
await knex.schema.createTable("monitor_tls_info", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable(); //TODO: no fk ?
table.text("info_json");
});
// notification_sent_history
await knex.schema.createTable("notification_sent_history", (table) => {
table.increments("id");
table.string("type", 50).notNullable();
table.integer("monitor_id").unsigned().notNullable();
table.integer("days").notNullable();
table.unique([ "type", "monitor_id", "days" ]);
table.index([ "type", "monitor_id", "days" ], "good_index");
});
// setting
await knex.schema.createTable("setting", (table) => {
table.increments("id");
table.string("key", 200).notNullable().unique().collate("utf8_general_ci");
table.text("value");
table.string("type", 20);
});
// status_page_cname
await knex.schema.createTable("status_page_cname", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.string("domain").notNullable().unique().collate("utf8_general_ci");
});
/*********************
* Converted Patch here
*********************/
// 2023-06-30-1348-http-body-encoding.js
// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
// UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
await knex.schema.table("monitor", function (table) {
table.string("http_body_encoding", 25);
});
await knex("monitor")
.where(function () {
this.where("type", "http").orWhere("type", "keyword");
})
.whereNull("http_body_encoding")
.update({
http_body_encoding: "json",
});
// 2023-06-30-1354-add-description-monitor.js
// ALTER TABLE monitor ADD description TEXT default null;
await knex.schema.table("monitor", function (table) {
table.text("description").defaultTo(null);
});
// 2023-06-30-1357-api-key-table.js
/*
CREATE TABLE [api_key] (
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
[key] VARCHAR(255) NOT NULL,
[name] VARCHAR(255) NOT NULL,
[user_id] INTEGER NOT NULL,
[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
[active] BOOLEAN DEFAULT 1 NOT NULL,
[expires] DATETIME DEFAULT NULL,
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
);
*/
await knex.schema.createTable("api_key", function (table) {
table.increments("id").primary();
table.string("key", 255).notNullable();
table.string("name", 255).notNullable();
table.integer("user_id").unsigned().notNullable()
.references("id").inTable("user")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
table.boolean("active").defaultTo(1).notNullable();
table.dateTime("expires").defaultTo(null);
});
// 2023-06-30-1400-monitor-tls.js
/*
ALTER TABLE monitor
ADD tls_ca TEXT default null;
ALTER TABLE monitor
ADD tls_cert TEXT default null;
ALTER TABLE monitor
ADD tls_key TEXT default null;
*/
await knex.schema.table("monitor", function (table) {
table.text("tls_ca").defaultTo(null);
table.text("tls_cert").defaultTo(null);
table.text("tls_key").defaultTo(null);
});
// 2023-06-30-1401-maintenance-cron.js
/*
-- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
DROP TABLE maintenance_timeslot;
ALTER TABLE maintenance ADD cron TEXT;
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
*/
await knex.schema
.dropTableIfExists("maintenance_timeslot")
.table("maintenance", function (table) {
table.text("cron");
table.string("timezone", 255);
table.integer("duration");
});
// 2023-06-30-1413-add-parent-monitor.js.
/*
ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
*/
await knex.schema.table("monitor", function (table) {
table.integer("parent").unsigned()
.references("id").inTable("monitor")
.onDelete("SET NULL")
.onUpdate("CASCADE");
});
/*
patch-add-invert-keyword.sql
ALTER TABLE monitor
ADD invert_keyword BOOLEAN default 0 not null;
*/
await knex.schema.table("monitor", function (table) {
table.boolean("invert_keyword").defaultTo(0).notNullable();
});
/*
patch-added-json-query.sql
ALTER TABLE monitor
ADD json_path TEXT;
ALTER TABLE monitor
ADD expected_value VARCHAR(255);
*/
await knex.schema.table("monitor", function (table) {
table.text("json_path");
table.string("expected_value", 255);
});
/*
patch-added-kafka-producer.sql
ALTER TABLE monitor
ADD kafka_producer_topic VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_brokers TEXT;
ALTER TABLE monitor
ADD kafka_producer_ssl INTEGER;
ALTER TABLE monitor
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_sasl_options TEXT;
ALTER TABLE monitor
ADD kafka_producer_message TEXT;
*/
await knex.schema.table("monitor", function (table) {
table.string("kafka_producer_topic", 255);
table.text("kafka_producer_brokers");
// patch-fix-kafka-producer-booleans.sql
table.boolean("kafka_producer_ssl").defaultTo(0).notNullable();
table.boolean("kafka_producer_allow_auto_topic_creation").defaultTo(0).notNullable();
table.text("kafka_producer_sasl_options");
table.text("kafka_producer_message");
});
/*
patch-add-certificate-expiry-status-page.sql
ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
*/
await knex.schema.table("status_page", function (table) {
table.boolean("show_certificate_expiry").defaultTo(0).notNullable();
});
/*
patch-monitor-oauth-cc.sql
ALTER TABLE monitor
ADD oauth_client_id TEXT default null;
ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;
ALTER TABLE monitor
ADD oauth_token_url TEXT default null;
ALTER TABLE monitor
ADD oauth_scopes TEXT default null;
ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;
*/
await knex.schema.table("monitor", function (table) {
table.text("oauth_client_id").defaultTo(null);
table.text("oauth_client_secret").defaultTo(null);
table.text("oauth_token_url").defaultTo(null);
table.text("oauth_scopes").defaultTo(null);
table.text("oauth_auth_method").defaultTo(null);
});
/*
patch-add-timeout-monitor.sql
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
*/
await knex.schema.table("monitor", function (table) {
table.double("timeout").defaultTo(0).notNullable();
});
/*
patch-add-gamedig-given-port.sql
ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;
*/
await knex.schema.table("monitor", function (table) {
table.boolean("gamedig_given_port_only").defaultTo(1).notNullable();
});
log.info("mariadb", "Created basic tables for MariaDB");
}
module.exports = {
createTables,
};

@ -0,0 +1,41 @@
exports.up = function (knex) {
return knex.schema
.createTable("stat_minutely", function (table) {
table.increments("id");
table.comment("This table contains the minutely aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest minute");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
})
.createTable("stat_daily", function (table) {
table.increments("id");
table.comment("This table contains the daily aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest day");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
});
};
exports.down = function (knex) {
return knex.schema
.dropTable("stat_minutely")
.dropTable("stat_daily");
};

@ -0,0 +1,16 @@
exports.up = function (knex) {
// Add new column heartbeat.end_time
return knex.schema
.alterTable("heartbeat", function (table) {
table.datetime("end_time").nullable().defaultTo(null);
});
};
exports.down = function (knex) {
// Drop column heartbeat.end_time
return knex.schema
.alterTable("heartbeat", function (table) {
table.dropColumn("end_time");
});
};

@ -0,0 +1,15 @@
exports.up = function (knex) {
// Add new column heartbeat.retries
return knex.schema
.alterTable("heartbeat", function (table) {
table.integer("retries").notNullable().defaultTo(0);
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("heartbeat", function (table) {
table.dropColumn("retries");
});
};

@ -0,0 +1,16 @@
exports.up = function (knex) {
// Add new column monitor.mqtt_check_type
return knex.schema
.alterTable("monitor", function (table) {
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
});
};
exports.down = function (knex) {
// Drop column monitor.mqtt_check_type
return knex.schema
.alterTable("monitor", function (table) {
table.dropColumn("mqtt_check_type");
});
};

@ -0,0 +1,14 @@
exports.up = function (knex) {
// Extend monitor.push_token to length 32
return knex.schema
.alterTable("monitor", function (table) {
table.string("push_token", 32).alter();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
table.string("push_token", 20).alter();
});
};

@ -0,0 +1,21 @@
exports.up = function (knex) {
return knex.schema
.createTable("remote_browser", function (table) {
table.increments("id");
table.string("name", 255).notNullable();
table.string("url", 255).notNullable();
table.integer("user_id").unsigned();
}).alterTable("monitor", function (table) {
// Add new column monitor.remote_browser
table.integer("remote_browser").nullable().defaultTo(null).unsigned()
.index()
.references("id")
.inTable("remote_browser");
});
};
exports.down = function (knex) {
return knex.schema.dropTable("remote_browser").alterTable("monitor", function (table) {
table.dropColumn("remote_browser");
});
};

@ -0,0 +1,24 @@
exports.up = function (knex) {
return knex.schema
.alterTable("stat_daily", function (table) {
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
})
.alterTable("stat_minutely", function (table) {
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("stat_daily", function (table) {
table.dropColumn("ping_min");
table.dropColumn("ping_max");
})
.alterTable("stat_minutely", function (table) {
table.dropColumn("ping_min");
table.dropColumn("ping_max");
});
};

@ -0,0 +1,26 @@
exports.up = function (knex) {
return knex.schema
.createTable("stat_hourly", function (table) {
table.increments("id");
table.comment("This table contains the hourly aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest hour");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
});
};
exports.down = function (knex) {
return knex.schema
.dropTable("stat_hourly");
};

@ -0,0 +1,56 @@
# Info
https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Basic rules
- All tables must have a primary key named `id`
- Filename format: `YYYY-MM-DD-HHMM-patch-name.js`
- Avoid native SQL syntax; use knex methods instead, because Uptime Kuma supports both SQLite and MariaDB.
## Template
```js
exports.up = function(knex) {
};
exports.down = function(knex) {
};
// exports.config = { transaction: false };
```
## Example
Filename: 2023-06-30-1348-create-user-and-product.js
```js
exports.up = function(knex) {
return knex.schema
.createTable('user', function (table) {
table.increments('id');
table.string('first_name', 255).notNullable();
table.string('last_name', 255).notNullable();
})
.createTable('product', function (table) {
table.increments('id');
table.decimal('price').notNullable();
table.string('name', 1000).notNullable();
}).then(() => {
knex("products").insert([
{ price: 10, name: "Apple" },
{ price: 20, name: "Orange" },
]);
});
};
exports.down = function(knex) {
return knex.schema
.dropTable("product")
.dropTable("user");
};
```
https://knexjs.org/guide/migrations.html#transactions-in-migrations
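
The link above covers per-migration transaction control. As a minimal, hypothetical sketch (the filename and the `example_flag` column are invented purely for illustration), a migration that needs to run outside the wrapping transaction can export the `config` object hinted at in the template:

```js
// 2023-07-01-0900-add-example-flag.js (hypothetical filename)
// Knex wraps each migration in a transaction by default;
// exporting `config.transaction = false` opts this migration out of it.
exports.config = { transaction: false };

exports.up = function (knex) {
    return knex.schema.alterTable("monitor", function (table) {
        // Hypothetical column, purely for illustration
        table.boolean("example_flag").notNullable().defaultTo(false);
    });
};

exports.down = function (knex) {
    return knex.schema.alterTable("monitor", function (table) {
        table.dropColumn("example_flag");
    });
};
```

Such a file would still live in `./db/knex_migrations` and follow the same filename format described in the rules above.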

@ -0,0 +1,3 @@
# Don't create a new migration file here
Please go to ./db/knex_migrations/README.md

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD show_certificate_expiry BOOLEAN default 0 NOT NULL;
COMMIT;

@ -1,5 +1,7 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db. -- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION; BEGIN TRANSACTION;
ALTER TABLE monitor_group ALTER TABLE monitor_group
ADD send_url BOOLEAN DEFAULT 0 NOT NULL; ADD send_url BOOLEAN DEFAULT 0 NOT NULL;
COMMIT; COMMIT;

@ -1,4 +1,7 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db. -- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION; BEGIN TRANSACTION;
ALTER TABLE status_page ADD google_analytics_tag_id VARCHAR;
ALTER TABLE monitor
ADD description TEXT default null;
COMMIT; COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD gamedig_given_port_only BOOLEAN default 1 not null;
COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD game VARCHAR(255);
COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD google_analytics_tag_id VARCHAR;
COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD invert_keyword BOOLEAN default 0 not null;
COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
COMMIT;

@ -1,3 +1,4 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION; BEGIN TRANSACTION;
ALTER TABLE monitor ALTER TABLE monitor
@ -15,4 +16,4 @@ ALTER TABLE monitor
ALTER TABLE monitor ALTER TABLE monitor
ADD radius_secret VARCHAR(255); ADD radius_secret VARCHAR(255);
COMMIT COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD timeout DOUBLE default 0 not null;
COMMIT;

@ -0,0 +1,10 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD json_path TEXT;
ALTER TABLE monitor
ADD expected_value VARCHAR(255);
COMMIT;

@ -0,0 +1,22 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD kafka_producer_topic VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_brokers TEXT;
ALTER TABLE monitor
ADD kafka_producer_ssl INTEGER;
ALTER TABLE monitor
ADD kafka_producer_allow_auto_topic_creation VARCHAR(255);
ALTER TABLE monitor
ADD kafka_producer_sasl_options TEXT;
ALTER TABLE monitor
ADD kafka_producer_message TEXT;
COMMIT;

@ -0,0 +1,15 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
CREATE TABLE [api_key] (
[id] INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
[key] VARCHAR(255) NOT NULL,
[name] VARCHAR(255) NOT NULL,
[user_id] INTEGER NOT NULL,
[created_date] DATETIME DEFAULT (DATETIME('now')) NOT NULL,
[active] BOOLEAN DEFAULT 1 NOT NULL,
[expires] DATETIME DEFAULT NULL,
CONSTRAINT FK_user FOREIGN KEY ([user_id]) REFERENCES [user]([id]) ON DELETE CASCADE ON UPDATE CASCADE
);
COMMIT;

@ -0,0 +1,34 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- Rename the existing COLUMNs (suffixing them with `_old`)
ALTER TABLE monitor
RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;
ALTER TABLE monitor
RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;
-- Add correct COLUMNs
ALTER TABLE monitor
ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;
ALTER TABLE monitor
ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;
-- This SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.
-- Copy the old values from the `_old` COLUMNs into the new ones
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
-- WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;
-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
-- WHERE monitor.kafka_producer_ssl_old IS NOT NULL;
-- Remove old COLUMNs
ALTER TABLE monitor
DROP COLUMN kafka_producer_allow_auto_topic_creation_old;
ALTER TABLE monitor
DROP COLUMN kafka_producer_ssl_old;
COMMIT;

@ -0,0 +1,12 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
COMMIT;
BEGIN TRANSACTION;
UPDATE monitor SET http_body_encoding = 'json' WHERE (type = 'http' or type = 'keyword') AND http_body_encoding IS NULL;
COMMIT;

@ -0,0 +1,11 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
DROP TABLE maintenance_timeslot;
-- 999 characters. https://stackoverflow.com/questions/46134830/maximum-length-for-cron-job
ALTER TABLE maintenance ADD cron TEXT;
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
COMMIT;

@ -0,0 +1,19 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD oauth_client_id TEXT default null;
ALTER TABLE monitor
ADD oauth_client_secret TEXT default null;
ALTER TABLE monitor
ADD oauth_token_url TEXT default null;
ALTER TABLE monitor
ADD oauth_scopes TEXT default null;
ALTER TABLE monitor
ADD oauth_auth_method TEXT default null;
COMMIT;

@ -0,0 +1,13 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD tls_ca TEXT default null;
ALTER TABLE monitor
ADD tls_cert TEXT default null;
ALTER TABLE monitor
ADD tls_key TEXT default null;
COMMIT;

@ -0,0 +1,10 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;
COMMIT;

@ -1,5 +1,7 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db. -- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION; BEGIN TRANSACTION;
ALTER TABLE monitor ALTER TABLE monitor
ADD packet_size INTEGER DEFAULT 56 NOT NULL; ADD packet_size INTEGER DEFAULT 56 NOT NULL;
COMMIT; COMMIT;

@ -18,5 +18,4 @@ drop table setting;
alter table setting_dg_tmp rename to setting; alter table setting_dg_tmp rename to setting;
COMMIT; COMMIT;

@ -0,0 +1,11 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page
ADD footer_text TEXT;
ALTER TABLE status_page
ADD custom_css TEXT;
ALTER TABLE status_page
ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;

@ -0,0 +1,7 @@
-- You should not modify this file once it has been pushed to GitHub, unless something is seriously wrong with the db.
BEGIN TRANSACTION;
UPDATE monitor SET timeout = (interval * 0.8)
WHERE timeout IS NULL OR timeout <= 0;
COMMIT;

@ -1,3 +1,4 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION; BEGIN TRANSACTION;
CREATE TABLE monitor_tls_info ( CREATE TABLE monitor_tls_info (

@ -1,5 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE monitor
ADD game VARCHAR(255);
COMMIT

@ -1,6 +0,0 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;
ALTER TABLE status_page ADD footer_text TEXT;
ALTER TABLE status_page ADD custom_css TEXT;
ALTER TABLE status_page ADD show_powered_by BOOLEAN NOT NULL DEFAULT 1;
COMMIT;

@ -1,8 +0,0 @@
# DON'T UPDATE TO alpine3.13, 1.14, see #41.
FROM node:16-alpine3.12
WORKDIR /app
# Install apprise, iputils for non-root ping, setpriv
RUN apk add --no-cache iputils setpriv dumb-init python3 py3-cryptography py3-pip py3-six py3-yaml py3-click py3-markdown py3-requests py3-requests-oauthlib git && \
pip3 --no-cache-dir install apprise==1.2.1 && \
rm -rf /root/.cache

@ -1,28 +1,51 @@
# DON'T UPDATE TO node:14-bullseye-slim, see #372.
# If the image changed, the second stage image should be changed too # If the image changed, the second stage image should be changed too
FROM node:16-buster-slim FROM node:20-bookworm-slim AS base2-slim
ARG TARGETPLATFORM ARG TARGETPLATFORM
WORKDIR /app # Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
# apprise = for notifications (From testing repo)
# Install Curl # sqlite3 = for debugging
# Install Apprise, add sqlite3 cli for debugging in the future, iputils-ping for ping, util-linux for setpriv # iputils-ping = for ping
# Stupid python3 and python3-pip actually install a lot of useless things into Debian, specify --no-install-recommends to skip them, make the base even smaller than alpine! # util-linux = for setpriv (Should be dropped in 2.0.0?)
RUN apt update && \ # dumb-init = avoid zombie processes (#480)
apt --yes --no-install-recommends install python3 python3-pip python3-cryptography python3-six python3-yaml python3-click python3-markdown python3-requests python3-requests-oauthlib \ # curl = for debugging
sqlite3 iputils-ping util-linux dumb-init git && \ # ca-certificates = keep the cert up-to-date
pip3 --no-cache-dir install apprise==1.2.1 && \ # sudo = for start service nscd with non-root user
# nscd = for better DNS caching
RUN echo "deb http://deb.debian.org/debian testing main" >> /etc/apt/sources.list && \
apt update && \
apt --yes --no-install-recommends -t testing install apprise sqlite3 ca-certificates && \
apt --yes --no-install-recommends -t stable install \
iputils-ping \
util-linux \
dumb-init \
curl \
sudo \
nscd && \
rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove apt --yes autoremove
# Install cloudflared # Install cloudflared
# dpkg --add-architecture arm: cloudflared do not provide armhf, this is workaround. Read more: https://github.com/cloudflare/cloudflared/issues/583 RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
COPY extra/download-cloudflared.js ./extra/download-cloudflared.js echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bullseye main' | tee /etc/apt/sources.list.d/cloudflared.list && \
RUN node ./extra/download-cloudflared.js $TARGETPLATFORM && \
dpkg --add-architecture arm && \
apt update && \ apt update && \
apt --yes --no-install-recommends install ./cloudflared.deb && \ apt install --yes --no-install-recommends -t stable cloudflared && \
cloudflared version && \
rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \
rm -f cloudflared.deb && \
apt --yes autoremove apt --yes autoremove
# For nscd
COPY ./docker/etc/nscd.conf /etc/nscd.conf
COPY ./docker/etc/sudoers /etc/sudoers
# Full Base Image
# MariaDB, Chromium and fonts
FROM base2-slim AS base2
ENV UPTIME_KUMA_ENABLE_EMBEDDED_MARIADB=1
RUN apt update && \
apt --yes --no-install-recommends install chromium fonts-indic fonts-noto fonts-noto-cjk mariadb-server && \
rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove && \
chown -R node:node /var/lib/mysql

@ -0,0 +1,14 @@
version: '3.8'
services:
uptime-kuma:
container_name: uptime-kuma-dev
image: louislam/uptime-kuma:nightly2
volumes:
#- ./data:/app/data
- ../server:/app/server
- ../db:/app/db
ports:
- "3001:3001" # <Host Port>:<Container Port>
- "3307:3306"

@ -1,14 +0,0 @@
# Simple docker-compose.yml
# You can change your port or volume location
version: '3.3'
services:
uptime-kuma:
image: louislam/uptime-kuma:1
container_name: uptime-kuma
volumes:
- ./uptime-kuma-data:/app/data
ports:
- 3001:3001 # <Host Port>:<Container Port>
restart: always

@ -1,6 +1,8 @@
ARG BASE_IMAGE=louislam/uptime-kuma:base2
############################################ ############################################
# Build in Golang # Build in Golang
# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck # Run npm run build-healthcheck-armv7 in the host first, otherwise it will be super slow where it is building the armv7 healthcheck
# Check file: builder-go.dockerfile # Check file: builder-go.dockerfile
############################################ ############################################
FROM louislam/uptime-kuma:builder-go AS build_healthcheck FROM louislam/uptime-kuma:builder-go AS build_healthcheck
@ -8,47 +10,57 @@ FROM louislam/uptime-kuma:builder-go AS build_healthcheck
############################################ ############################################
# Build in Node.js # Build in Node.js
############################################ ############################################
FROM louislam/uptime-kuma:base-debian AS build FROM louislam/uptime-kuma:base2 AS build
USER node
WORKDIR /app WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY .npmrc .npmrc COPY --chown=node:node .npmrc .npmrc
COPY package.json package.json COPY --chown=node:node package.json package.json
COPY package-lock.json package-lock.json COPY --chown=node:node package-lock.json package-lock.json
RUN npm ci --omit=dev RUN npm ci --omit=dev
COPY . . COPY . .
COPY --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck COPY --chown=node:node --from=build_healthcheck /app/extra/healthcheck /app/extra/healthcheck
RUN chmod +x /app/extra/entrypoint.sh RUN mkdir ./data
############################################ ############################################
# ⭐ Main Image # ⭐ Main Image
############################################ ############################################
FROM louislam/uptime-kuma:base-debian AS release FROM $BASE_IMAGE AS release
USER node
WORKDIR /app WORKDIR /app
# Copy app files from build layer LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma"
COPY --from=build /app /app
ENV UPTIME_KUMA_IS_CONTAINER=1
# Copy app files from build layer
COPY --chown=node:node --from=build /app /app
EXPOSE 3001 EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"] ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["node", "server/server.js"] CMD ["node", "server/server.js"]
############################################
# Rootless Image
############################################
FROM release AS rootless
############################################ ############################################
# Mark as Nightly # Mark as Nightly
############################################ ############################################
FROM release AS nightly FROM release AS nightly
RUN npm run mark-as-nightly RUN npm run mark-as-nightly
FROM nightly AS nightly-rootless
USER node
############################################ ############################################
# Build an image for testing pr # Build an image for testing pr
############################################ ############################################
FROM louislam/uptime-kuma:base-debian AS pr-test FROM louislam/uptime-kuma:base2 AS pr-test2
WORKDIR /app WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
## Install Git ## Install Git
@ -70,14 +82,13 @@ RUN git clone https://github.com/louislam/uptime-kuma.git .
RUN npm ci RUN npm ci
EXPOSE 3000 3001 EXPOSE 3000 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD extra/healthcheck
CMD ["npm", "run", "start-pr-test"] CMD ["npm", "run", "start-pr-test"]
############################################ ############################################
# Upload the artifact to Github # Upload the artifact to Github
############################################ ############################################
FROM louislam/uptime-kuma:base-debian AS upload-artifact FROM louislam/uptime-kuma:base2 AS upload-artifact
WORKDIR / WORKDIR /
RUN apt update && \ RUN apt update && \
apt --yes install curl file apt --yes install curl file

@ -1,27 +0,0 @@
FROM louislam/uptime-kuma:base-alpine AS build
WORKDIR /app
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
COPY .npmrc .npmrc
COPY package.json package.json
COPY package-lock.json package-lock.json
RUN npm ci --omit=dev
COPY . .
RUN chmod +x /app/extra/entrypoint.sh
FROM louislam/uptime-kuma:base-alpine AS release
WORKDIR /app
# Copy app files from build layer
COPY --from=build /app /app
EXPOSE 3001
VOLUME ["/app/data"]
HEALTHCHECK --interval=60s --timeout=30s --start-period=180s --retries=5 CMD node extra/healthcheck.js
ENTRYPOINT ["/usr/bin/dumb-init", "--", "extra/entrypoint.sh"]
CMD ["node", "server/server.js"]
FROM release AS nightly
RUN npm run mark-as-nightly

@ -0,0 +1,90 @@
#
# /etc/nscd.conf
#
# An example Name Service Cache config file. This file is needed by nscd.
#
# Legal entries are:
#
# logfile <file>
# debug-level <level>
# threads <initial #threads to use>
# max-threads <maximum #threads to use>
# server-user <user to run server as instead of root>
# server-user is ignored if nscd is started with -S parameters
# stat-user <user who is allowed to request statistics>
# reload-count unlimited|<number>
# paranoia <yes|no>
# restart-interval <time in seconds>
#
# enable-cache <service> <yes|no>
# positive-time-to-live <service> <time in seconds>
# negative-time-to-live <service> <time in seconds>
# suggested-size <service> <prime number>
# check-files <service> <yes|no>
# persistent <service> <yes|no>
# shared <service> <yes|no>
# max-db-size <service> <number bytes>
# auto-propagate <service> <yes|no>
#
# Currently supported cache names (services): passwd, group, hosts, services
#
# logfile /var/log/nscd.log
# threads 4
# max-threads 32
# server-user node
# stat-user somebody
debug-level 0
# reload-count 5
paranoia no
# restart-interval 3600
enable-cache passwd no
positive-time-to-live passwd 600
negative-time-to-live passwd 20
suggested-size passwd 211
check-files passwd yes
persistent passwd yes
shared passwd yes
max-db-size passwd 33554432
auto-propagate passwd yes
enable-cache group no
positive-time-to-live group 3600
negative-time-to-live group 60
suggested-size group 211
check-files group yes
persistent group yes
shared group yes
max-db-size group 33554432
auto-propagate group yes
enable-cache hosts yes
positive-time-to-live hosts 3600
negative-time-to-live hosts 20
suggested-size hosts 211
check-files hosts yes
persistent hosts yes
# Set shared to "no" to display stats in `nscd -g`
# Read more: https://stackoverflow.com/questions/40429245/nscdcentos7curl-0-dns-cache-hit-rate
shared hosts no
max-db-size hosts 33554432
enable-cache services no
positive-time-to-live services 28800
negative-time-to-live services 20
suggested-size services 211
check-files services yes
persistent services yes
shared services yes
max-db-size services 33554432
enable-cache netgroup no
positive-time-to-live netgroup 28800
negative-time-to-live netgroup 20
suggested-size netgroup 211
check-files netgroup yes
persistent netgroup yes
shared netgroup yes
max-db-size netgroup 33554432

@ -0,0 +1,31 @@
#
# This file MUST be edited with the 'visudo' command as root.
#
# Please consider adding local content in /etc/sudoers.d/ instead of
# directly modifying this file.
#
# See the man page for details on how to write a sudoers file.
#
Defaults env_reset
Defaults mail_badpass
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# Host alias specification
# User alias specification
# Cmnd alias specification
# User privilege specification
root ALL=(ALL:ALL) ALL
# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL
# See sudoers(5) for more information on "#include" directives:
#includedir /etc/sudoers.d
# Allow `node` to control service (mainly for nscd)
node ALL=(root) NOPASSWD: /usr/sbin/nscdservice
node ALL=(root) NOPASSWD: /usr/sbin/service
