commit 124f5a471a
@@ -0,0 +1,43 @@
name: "CodeQL"

on:
  push:
    branches: [ "master", "1.23.X" ]
  pull_request:
    branches: [ "master", "1.23.X" ]
  schedule:
    - cron: '16 22 * * 0'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    timeout-minutes: 360

    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go', 'javascript-typescript' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}

      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{matrix.language}}"
@@ -0,0 +1,25 @@
name: Merge Conflict Labeler

on:
  push:
    branches:
      - master
  pull_request_target:
    branches:
      - master
    types: [synchronize]

jobs:
  label:
    name: Labeling
    runs-on: ubuntu-latest
    if: ${{ github.repository == 'louislam/uptime-kuma' }}
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Apply label
        uses: eps1lon/actions-label-merge-conflict@v3
        with:
          dirtyLabel: 'needs:resolve-merge-conflict'
          repoToken: '${{ secrets.GITHUB_TOKEN }}'
@@ -1,7 +0,0 @@
const config = {};

if (process.env.TEST_FRONTEND) {
    config.presets = [ "@babel/preset-env" ];
}

module.exports = config;
@@ -0,0 +1,9 @@
services:
  uptime-kuma:
    image: louislam/uptime-kuma:1
    volumes:
      - ./data:/app/data
    ports:
      # <Host Port>:<Container Port>
      - 3001:3001
    restart: unless-stopped
@@ -1,28 +0,0 @@
const { defineConfig } = require("cypress");

module.exports = defineConfig({
    projectId: "vyjuem",
    e2e: {
        experimentalStudio: true,
        setupNodeEvents(on, config) {

        },
        fixturesFolder: "test/cypress/fixtures",
        screenshotsFolder: "test/cypress/screenshots",
        videosFolder: "test/cypress/videos",
        downloadsFolder: "test/cypress/downloads",
        supportFile: "test/cypress/support/e2e.js",
        baseUrl: "http://localhost:3002",
        defaultCommandTimeout: 10000,
        pageLoadTimeout: 60000,
        viewportWidth: 1920,
        viewportHeight: 1080,
        specPattern: [
            "test/cypress/e2e/setup.cy.js",
            "test/cypress/e2e/**/*.js"
        ],
    },
    env: {
        baseUrl: "http://localhost:3002",
    },
});
@@ -1,10 +0,0 @@
const { defineConfig } = require("cypress");

module.exports = defineConfig({
    e2e: {
        supportFile: false,
        specPattern: [
            "test/cypress/unit/**/*.js"
        ],
    }
});
@@ -0,0 +1,60 @@
import { defineConfig, devices } from "@playwright/test";

const port = 30001;
const url = `http://localhost:${port}`;

export default defineConfig({
    // Look for test files in the "tests" directory, relative to this configuration file.
    testDir: "../test/e2e",
    outputDir: "../private/playwright-test-results",
    fullyParallel: false,
    locale: "en-US",

    // Fail the build on CI if you accidentally left test.only in the source code.
    forbidOnly: !!process.env.CI,

    // Retry on CI only.
    retries: process.env.CI ? 2 : 0,

    // Opt out of parallel tests on CI.
    workers: 1,

    // Reporter to use
    reporter: [
        [
            "html", {
                outputFolder: "../private/playwright-report",
                open: "never",
            }
        ],
    ],

    use: {
        // Base URL to use in actions like `await page.goto('/')`.
        baseURL: url,

        // Collect trace when retrying the failed test.
        trace: "on-first-retry",
    },

    // Configure projects for major browsers.
    projects: [
        {
            name: "chromium",
            use: { ...devices["Desktop Chrome"] },
        },
        /*
        {
            name: "firefox",
            use: { browserName: "firefox" }
        },*/
    ],

    // Run your local dev server before starting the tests.
    webServer: {
        command: `node extra/remove-playwright-test-data.js && node server/server.js --port=${port} --data-dir=./data/playwright-test`,
        url,
        reuseExistingServer: false,
        cwd: "../",
    },
});
@@ -0,0 +1,15 @@
exports.up = function (knex) {
    // Add new column heartbeat.retries
    return knex.schema
        .alterTable("heartbeat", function (table) {
            table.integer("retries").notNullable().defaultTo(0);
        });

};

exports.down = function (knex) {
    return knex.schema
        .alterTable("heartbeat", function (table) {
            table.dropColumn("retries");
        });
};
@@ -0,0 +1,16 @@
exports.up = function (knex) {
    // Add new column monitor.mqtt_check_type
    return knex.schema
        .alterTable("monitor", function (table) {
            table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
        });

};

exports.down = function (knex) {
    // Drop column monitor.mqtt_check_type
    return knex.schema
        .alterTable("monitor", function (table) {
            table.dropColumn("mqtt_check_type");
        });
};
@@ -0,0 +1,14 @@
exports.up = function (knex) {
    // update monitor.push_token to 32 length
    return knex.schema
        .alterTable("monitor", function (table) {
            table.string("push_token", 32).alter();
        });
};

exports.down = function (knex) {
    return knex.schema
        .alterTable("monitor", function (table) {
            table.string("push_token", 20).alter();
        });
};
@@ -0,0 +1,21 @@
exports.up = function (knex) {
    return knex.schema
        .createTable("remote_browser", function (table) {
            table.increments("id");
            table.string("name", 255).notNullable();
            table.string("url", 255).notNullable();
            table.integer("user_id").unsigned();
        }).alterTable("monitor", function (table) {
            // Add new column monitor.remote_browser
            table.integer("remote_browser").nullable().defaultTo(null).unsigned()
                .index()
                .references("id")
                .inTable("remote_browser");
        });
};

exports.down = function (knex) {
    return knex.schema.dropTable("remote_browser").alterTable("monitor", function (table) {
        table.dropColumn("remote_browser");
    });
};
@@ -0,0 +1,12 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("status_page", function (table) {
            table.integer("auto_refresh_interval").defaultTo(300).unsigned();
        });
};

exports.down = function (knex) {
    return knex.schema.alterTable("status_page", function (table) {
        table.dropColumn("auto_refresh_interval");
    });
};
@@ -0,0 +1,24 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
            table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
        })
        .alterTable("stat_minutely", function (table) {
            table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
            table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
        });

};

exports.down = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.dropColumn("ping_min");
            table.dropColumn("ping_max");
        })
        .alterTable("stat_minutely", function (table) {
            table.dropColumn("ping_min");
            table.dropColumn("ping_max");
        });
};
@@ -0,0 +1,26 @@
exports.up = function (knex) {
    return knex.schema
        .createTable("stat_hourly", function (table) {
            table.increments("id");
            table.comment("This table contains the hourly aggregate statistics for each monitor");
            table.integer("monitor_id").unsigned().notNullable()
                .references("id").inTable("monitor")
                .onDelete("CASCADE")
                .onUpdate("CASCADE");
            table.integer("timestamp")
                .notNullable()
                .comment("Unix timestamp rounded down to the nearest hour");
            table.float("ping").notNullable().comment("Average ping in milliseconds");
            table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
            table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
            table.smallint("up").notNullable();
            table.smallint("down").notNullable();

            table.unique([ "monitor_id", "timestamp" ]);
        });
};

exports.down = function (knex) {
    return knex.schema
        .dropTable("stat_hourly");
};
@@ -0,0 +1,26 @@
exports.up = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
        })
        .alterTable("stat_minutely", function (table) {
            table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
        })
        .alterTable("stat_hourly", function (table) {
            table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
        });

};

exports.down = function (knex) {
    return knex.schema
        .alterTable("stat_daily", function (table) {
            table.dropColumn("extras");
        })
        .alterTable("stat_minutely", function (table) {
            table.dropColumn("extras");
        })
        .alterTable("stat_hourly", function (table) {
            table.dropColumn("extras");
        });
};
@@ -0,0 +1,34 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

-- Rename COLUMNs to another one (suffixed by `_old`)
ALTER TABLE monitor
    RENAME COLUMN kafka_producer_ssl TO kafka_producer_ssl_old;

ALTER TABLE monitor
    RENAME COLUMN kafka_producer_allow_auto_topic_creation TO kafka_producer_allow_auto_topic_creation_old;

-- Add correct COLUMNs
ALTER TABLE monitor
    ADD COLUMN kafka_producer_ssl BOOLEAN default 0 NOT NULL;

ALTER TABLE monitor
    ADD COLUMN kafka_producer_allow_auto_topic_creation BOOLEAN default 0 NOT NULL;

-- These SQL is still not fully safe. See https://github.com/louislam/uptime-kuma/issues/4039.

-- Set bring old values from `_old` COLUMNs to correct ones
-- UPDATE monitor SET kafka_producer_allow_auto_topic_creation = monitor.kafka_producer_allow_auto_topic_creation_old
--     WHERE monitor.kafka_producer_allow_auto_topic_creation_old IS NOT NULL;

-- UPDATE monitor SET kafka_producer_ssl = monitor.kafka_producer_ssl_old
--     WHERE monitor.kafka_producer_ssl_old IS NOT NULL;

-- Remove old COLUMNs
ALTER TABLE monitor
    DROP COLUMN kafka_producer_allow_auto_topic_creation_old;

ALTER TABLE monitor
    DROP COLUMN kafka_producer_ssl_old;

COMMIT;
@@ -0,0 +1,18 @@
BEGIN TRANSACTION;

PRAGMA writable_schema = TRUE;

UPDATE
    SQLITE_MASTER
SET
    sql = replace(sql,
        'monitor_id INTEGER NOT NULL',
        'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
    )
WHERE
    name = 'monitor_tls_info'
    AND type = 'table';

PRAGMA writable_schema = RESET;

COMMIT;
@@ -0,0 +1,10 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

-- SQLite: Change the data type of the column "config" from VARCHAR to TEXT
ALTER TABLE notification RENAME COLUMN config TO config_old;
ALTER TABLE notification ADD COLUMN config TEXT;
UPDATE notification SET config = config_old;
ALTER TABLE notification DROP COLUMN config_old;

COMMIT;
@@ -0,0 +1,7 @@
-- You should not modify if this have pushed to Github, unless it does serious wrong with the db.
BEGIN TRANSACTION;

UPDATE monitor SET timeout = (interval * 0.8)
    WHERE timeout IS NULL OR timeout <= 0;

COMMIT;
@@ -0,0 +1,18 @@
BEGIN TRANSACTION;

PRAGMA writable_schema = TRUE;

UPDATE
    SQLITE_MASTER
SET
    sql = replace(sql,
        'monitor_id INTEGER NOT NULL',
        'monitor_id INTEGER NOT NULL REFERENCES [monitor] ([id]) ON DELETE CASCADE ON UPDATE CASCADE'
    )
WHERE
    name = 'monitor_tls_info'
    AND type = 'table';

PRAGMA writable_schema = RESET;

COMMIT;
@@ -1,15 +0,0 @@
version: '3.8'

services:
  uptime-kuma:
    image: louislam/uptime-kuma:1
    container_name: uptime-kuma
    volumes:
      - uptime-kuma:/app/data
    ports:
      - "3001:3001" # <Host Port>:<Container Port>
    restart: always

volumes:
  uptime-kuma:
@@ -1,276 +0,0 @@
// install.sh is generated by ./extra/install.batsh, do not modify it directly.
// "npm run compile-install-script" to compile install.sh
// The command is working on Windows PowerShell and Docker for Windows only.

// curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
println("=====================");
println("Uptime Kuma Install Script");
println("=====================");
println("Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8");
println("---------------------------------------");
println("This script is designed for Linux and basic usage.");
println("For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation");
println("---------------------------------------");
println("");
println("Local - Install Uptime Kuma on your current machine with git, Node.js and pm2");
println("Docker - Install Uptime Kuma Docker container");
println("");

if ("$1" != "") {
    type = "$1";
} else {
    call("read", "-p", "Which installation method do you prefer? [DOCKER/local]: ", "type");
}

defaultPort = "3001";

function checkNode() {
    bash("nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')");
    println("Node Version: " ++ nodeVersion);

    if (nodeVersion <= "12") {
        println("Error: Required Node.js 14");
        call("exit", "1");
    }
}

function deb() {
    bash("nodeCheck=$(node -v)");
    bash("apt --yes update");

    if (nodeCheck != "") {
        checkNode();
    } else {

        // Old nodejs binary name is "nodejs"
        bash("check=$(nodejs --version)");
        if (check != "") {
            println("Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old.");
            bash("exit 1");
        }

        bash("curlCheck=$(curl --version)");
        if (curlCheck == "") {
            println("Installing Curl");
            bash("apt --yes install curl");
        }

        println("Installing Node.js 16");
        bash("curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt");
        bash("apt --yes install nodejs");
        bash("node -v");

        bash("nodeCheckAgain=$(node -v)");

        if (nodeCheckAgain == "") {
            println("Error during Node.js installation");
            bash("exit 1");
        }
    }

    bash("check=$(git --version)");
    if (check == "") {
        println("Installing Git");
        bash("apt --yes install git");
    }
}

if (type == "local") {
    defaultInstallPath = "/opt/uptime-kuma";

    if (exists("/etc/redhat-release")) {
        os = call("cat", "/etc/redhat-release");
        distribution = "rhel";

    } else if (exists("/etc/issue")) {
        bash("os=$(head -n1 /etc/issue | cut -f 1 -d ' ')");
        if (os == "Ubuntu") {
            distribution = "ubuntu";

            // Get ubuntu version
            bash(". /etc/lsb-release");
            version = DISTRIB_RELEASE;
        }
        if (os == "Debian") {
            distribution = "debian";
        }
    }

    bash("arch=$(uname -i)");

    println("Your OS: " ++ os);
    println("Distribution: " ++ distribution);
    println("Version: " ++ version);
    println("Arch: " ++ arch);

    if ("$3" != "") {
        port = "$3";
    } else {
        call("read", "-p", "Listening Port [$defaultPort]: ", "port");

        if (port == "") {
            port = defaultPort;
        }
    }

    if ("$2" != "") {
        installPath = "$2";
    } else {
        call("read", "-p", "Installation Path [$defaultInstallPath]: ", "installPath");

        if (installPath == "") {
            installPath = defaultInstallPath;
        }
    }

    // CentOS
    if (distribution == "rhel") {
        bash("nodeCheck=$(node -v)");

        if (nodeCheck != "") {
            checkNode();
        } else {

            bash("dnfCheck=$(dnf --version)");

            // Use yum
            if (dnfCheck == "") {
                bash("curlCheck=$(curl --version)");
                if (curlCheck == "") {
                    println("Installing Curl");
                    bash("yum -y -q install curl");
                }

                println("Installing Node.js 16");
                bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
                bash("yum install -y -q nodejs");
            } else {
                bash("curlCheck=$(curl --version)");
                if (curlCheck == "") {
                    println("Installing Curl");
                    bash("dnf -y install curl");
                }

                println("Installing Node.js 16");
                bash("curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt");
                bash("dnf install -y nodejs");
            }

            bash("node -v");

            bash("nodeCheckAgain=$(node -v)");

            if (nodeCheckAgain == "") {
                println("Error during Node.js installation");
                bash("exit 1");
            }
        }

        bash("check=$(git --version)");
        if (check == "") {
            println("Installing Git");
            bash("yum -y -q install git");
        }

    // Ubuntu
    } else if (distribution == "ubuntu") {
        deb();

    // Debian
    } else if (distribution == "debian") {
        deb();

    } else {
        // Unknown distribution
        error = 0;

        bash("check=$(git --version)");
        if (check == "") {
            error = 1;
            println("Error: git is not found!");
            println("help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git");
        }

        bash("check=$(node -v)");
        if (check == "") {
            error = 1;
            println("Error: node is not found");
            println("help: an installation guide is available at https://nodejs.org/en/download");
        }

        if (error > 0) {
            println("Please install above missing software");
            bash("exit 1");
        }
    }

    bash("check=$(pm2 --version)");
    if (check == "") {
        println("Installing PM2");
        bash("npm install pm2 -g && pm2 install pm2-logrotate");
        bash("pm2 startup");
    }

    // Check again
    bash("check=$(pm2 --version)");
    if (check == "") {
        println("Error: pm2 is not found!");
        println("help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/");
        bash("exit 1");
    }

    bash("mkdir -p $installPath");
    bash("cd $installPath");
    bash("git clone https://github.com/louislam/uptime-kuma.git .");
    bash("npm run setup");

    bash("pm2 start server/server.js --name uptime-kuma -- --port=$port");

} else {
    defaultVolume = "uptime-kuma";

    bash("check=$(docker -v)");
    if (check == "") {
        println("Error: docker is not found!");
        println("help: an installation guide is available at https://docs.docker.com/desktop/");
        bash("exit 1");
    }

    bash("check=$(docker info)");

    bash("if [[ \"$check\" == *\"Is the docker daemon running\"* ]]; then
        \"echo\" \"Error: docker is not running\"
        \"echo\" \"help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/\"
        \"exit\" \"1\"
    fi");

    if ("$3" != "") {
        port = "$3";
    } else {
        call("read", "-p", "Expose Port [$defaultPort]: ", "port");

        if (port == "") {
            port = defaultPort;
        }
    }

    if ("$2" != "") {
        volume = "$2";
    } else {
        call("read", "-p", "Volume Name [$defaultVolume]: ", "volume");

        if (volume == "") {
            volume = defaultVolume;
        }
    }

    println("Port: $port");
    println("Volume: $volume");
    bash("docker volume create $volume");
    bash("docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1");
}

println("http://localhost:$port");
@@ -0,0 +1 @@
docker run -d --restart=always --name uptime-kuma-push louislam/uptime-kuma:push "https://example.com/api/push/key?status=up&msg=OK&ping=" 60
@@ -0,0 +1,6 @@
const fs = require("fs");

fs.rmSync("./data/playwright-test", {
    recursive: true,
    force: true,
});
@@ -0,0 +1 @@
build/*
@@ -0,0 +1,18 @@
FROM node AS build
RUN useradd --create-home kuma
USER kuma
WORKDIR /home/kuma
ARG TARGETPLATFORM
COPY --chown=kuma:kuma ./build/ ./build/
COPY --chown=kuma:kuma build.js build.js
RUN node build.js $TARGETPLATFORM

FROM debian:bookworm-slim AS release
RUN useradd --create-home kuma
USER kuma
WORKDIR /home/kuma
COPY --from=build /home/kuma/uptime-kuma-push ./uptime-kuma-push

ENTRYPOINT ["/home/kuma/uptime-kuma-push"]
@@ -0,0 +1,48 @@
const fs = require("fs");
const platform = process.argv[2];

if (!platform) {
    console.error("No platform??");
    process.exit(1);
}

const supportedPlatforms = [
    {
        name: "linux/amd64",
        bin: "./build/uptime-kuma-push-amd64"
    },
    {
        name: "linux/arm64",
        bin: "./build/uptime-kuma-push-arm64"
    },
    {
        name: "linux/arm/v7",
        bin: "./build/uptime-kuma-push-armv7"
    }
];

let platformObj = null;

// Check if the platform is supported
for (let i = 0; i < supportedPlatforms.length; i++) {
    if (supportedPlatforms[i].name === platform) {
        platformObj = supportedPlatforms[i];
        break;
    }
}

if (platformObj) {
    let filename = platformObj.bin;

    if (!fs.existsSync(filename)) {
        console.error(`prebuilt: ${filename} is not found, please build it first`);
        process.exit(1);
    }

    fs.renameSync(filename, "./uptime-kuma-push");
    process.exit(0);
} else {
    console.error("Unsupported platform: " + platform);
    process.exit(1);
}
@@ -0,0 +1,13 @@
{
    "scripts": {
        "build-docker": "npm run build-all && docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t louislam/uptime-kuma:push . --push --target release",
        "build-all": "npm run build-win && npm run build-linux-amd64 && npm run build-linux-arm64 && npm run build-linux-armv7 && npm run build-linux-armv6 && npm run build-linux-armv5 && npm run build-linux-riscv64",
        "build-win": "cross-env GOOS=windows GOARCH=amd64 go build -x -o ./build/uptime-kuma-push.exe uptime-kuma-push.go",
        "build-linux-amd64": "cross-env GOOS=linux GOARCH=amd64 go build -x -o ./build/uptime-kuma-push-amd64 uptime-kuma-push.go",
        "build-linux-arm64": "cross-env GOOS=linux GOARCH=arm64 go build -x -o ./build/uptime-kuma-push-arm64 uptime-kuma-push.go",
        "build-linux-armv7": "cross-env GOOS=linux GOARCH=arm GOARM=7 go build -x -o ./build/uptime-kuma-push-armv7 uptime-kuma-push.go",
        "build-linux-armv6": "cross-env GOOS=linux GOARCH=arm GOARM=6 go build -x -o ./build/uptime-kuma-push-armv6 uptime-kuma-push.go",
        "build-linux-armv5": "cross-env GOOS=linux GOARCH=arm GOARM=5 go build -x -o ./build/uptime-kuma-push-armv5 uptime-kuma-push.go",
        "build-linux-riscv64": "cross-env GOOS=linux GOARCH=riscv64 go build -x -o ./build/uptime-kuma-push-riscv64 uptime-kuma-push.go"
    }
}
@@ -0,0 +1,44 @@
package main

import (
    "fmt"
    "net/http"
    os "os"
    "time"
)

func main() {
    if len(os.Args) < 2 {
        fmt.Fprintln(os.Stderr, "Usage: uptime-kuma-push <url> [<interval>]")
        os.Exit(1)
    }

    pushURL := os.Args[1]

    var interval time.Duration

    if len(os.Args) >= 3 {
        intervalString, err := time.ParseDuration(os.Args[2] + "s")
        interval = intervalString

        if err != nil {
            fmt.Fprintln(os.Stderr, "Error: Invalid interval", err)
            os.Exit(1)
        }

    } else {
        interval = 60 * time.Second
    }

    for {
        _, err := http.Get(pushURL)
        if err == nil {
            fmt.Print("Pushed!")
        } else {
            fmt.Print("Error: ", err)
        }

        fmt.Println(" Sleeping for", interval)
        time.Sleep(interval)
    }
}
@@ -1,228 +0,0 @@
# install.sh is generated by ./extra/install.batsh, do not modify it directly.
# "npm run compile-install-script" to compile install.sh
# The command is working on Windows PowerShell and Docker for Windows only.
# curl -o kuma_install.sh https://raw.githubusercontent.com/louislam/uptime-kuma/master/install.sh && sudo bash kuma_install.sh
"echo" "-e" "====================="
"echo" "-e" "Uptime Kuma Install Script"
"echo" "-e" "====================="
"echo" "-e" "Supported OS: Ubuntu >= 16.04, Debian and CentOS/RHEL 7/8"
"echo" "-e" "---------------------------------------"
"echo" "-e" "This script is designed for Linux and basic usage."
"echo" "-e" "For advanced usage, please go to https://github.com/louislam/uptime-kuma/wiki/Installation"
"echo" "-e" "---------------------------------------"
"echo" "-e" ""
"echo" "-e" "Local - Install Uptime Kuma on your current machine with git, Node.js and pm2"
"echo" "-e" "Docker - Install Uptime Kuma Docker container"
"echo" "-e" ""
if [ "$1" != "" ]; then
    type="$1"
else
    "read" "-p" "Which installation method do you prefer? [DOCKER/local]: " "type"
fi
defaultPort="3001"
function checkNode {
    local _0
    nodeVersion=$(node -e 'console.log(process.versions.node.split(`.`)[0])')
    "echo" "-e" "Node Version: ""$nodeVersion"
    _0="12"
    if [ $(($nodeVersion <= $_0)) == 1 ]; then
        "echo" "-e" "Error: Required Node.js 14"
        "exit" "1"
    fi
}
function deb {
    nodeCheck=$(node -v)
    apt --yes update
    if [ "$nodeCheck" != "" ]; then
        "checkNode"
    else
        # Old nodejs binary name is "nodejs"
        check=$(nodejs --version)
        if [ "$check" != "" ]; then
            "echo" "-e" "Error: 'node' command is not found, but 'nodejs' command is found. Your NodeJS should be too old."
            exit 1
        fi
        curlCheck=$(curl --version)
        if [ "$curlCheck" == "" ]; then
            "echo" "-e" "Installing Curl"
            apt --yes install curl
        fi
        "echo" "-e" "Installing Node.js 16"
        curl -sL https://deb.nodesource.com/setup_16.x | bash - > log.txt
        apt --yes install nodejs
        node -v
        nodeCheckAgain=$(node -v)
        if [ "$nodeCheckAgain" == "" ]; then
            "echo" "-e" "Error during Node.js installation"
            exit 1
        fi
    fi
    check=$(git --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Installing Git"
        apt --yes install git
    fi
}
if [ "$type" == "local" ]; then
    defaultInstallPath="/opt/uptime-kuma"
    if [ -e "/etc/redhat-release" ]; then
        os=$("cat" "/etc/redhat-release")
        distribution="rhel"
    else
        if [ -e "/etc/issue" ]; then
            os=$(head -n1 /etc/issue | cut -f 1 -d ' ')
            if [ "$os" == "Ubuntu" ]; then
                distribution="ubuntu"
                # Get ubuntu version
                . /etc/lsb-release
                version="$DISTRIB_RELEASE"
            fi
            if [ "$os" == "Debian" ]; then
                distribution="debian"
            fi
        fi
    fi
    arch=$(uname -i)
    "echo" "-e" "Your OS: ""$os"
    "echo" "-e" "Distribution: ""$distribution"
    "echo" "-e" "Version: ""$version"
    "echo" "-e" "Arch: ""$arch"
    if [ "$3" != "" ]; then
        port="$3"
    else
        "read" "-p" "Listening Port [$defaultPort]: " "port"
        if [ "$port" == "" ]; then
            port="$defaultPort"
        fi
    fi
    if [ "$2" != "" ]; then
        installPath="$2"
    else
        "read" "-p" "Installation Path [$defaultInstallPath]: " "installPath"
        if [ "$installPath" == "" ]; then
            installPath="$defaultInstallPath"
        fi
    fi
    # CentOS
    if [ "$distribution" == "rhel" ]; then
        nodeCheck=$(node -v)
        if [ "$nodeCheck" != "" ]; then
            "checkNode"
        else
            dnfCheck=$(dnf --version)
            # Use yum
            if [ "$dnfCheck" == "" ]; then
                curlCheck=$(curl --version)
                if [ "$curlCheck" == "" ]; then
                    "echo" "-e" "Installing Curl"
                    yum -y -q install curl
                fi
                "echo" "-e" "Installing Node.js 16"
                curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
                yum install -y -q nodejs
            else
                curlCheck=$(curl --version)
                if [ "$curlCheck" == "" ]; then
                    "echo" "-e" "Installing Curl"
                    dnf -y install curl
                fi
                "echo" "-e" "Installing Node.js 16"
                curl -sL https://rpm.nodesource.com/setup_16.x | bash - > log.txt
                dnf install -y nodejs
            fi
            node -v
            nodeCheckAgain=$(node -v)
            if [ "$nodeCheckAgain" == "" ]; then
                "echo" "-e" "Error during Node.js installation"
                exit 1
            fi
        fi
        check=$(git --version)
        if [ "$check" == "" ]; then
            "echo" "-e" "Installing Git"
            yum -y -q install git
        fi
    # Ubuntu
    else
        if [ "$distribution" == "ubuntu" ]; then
            "deb"
        # Debian
        else
            if [ "$distribution" == "debian" ]; then
                "deb"
            else
                # Unknown distribution
                error=$((0))
                check=$(git --version)
                if [ "$check" == "" ]; then
                    error=$((1))
                    "echo" "-e" "Error: git is not found!"
                    "echo" "-e" "help: an installation guide is available at https://git-scm.com/book/en/v2/Getting-Started-Installing-Git"
                fi
                check=$(node -v)
                if [ "$check" == "" ]; then
                    error=$((1))
                    "echo" "-e" "Error: node is not found"
                    "echo" "-e" "help: an installation guide is available at https://nodejs.org/en/download"
                fi
                if [ $(($error > 0)) == 1 ]; then
                    "echo" "-e" "Please install above missing software"
                    exit 1
                fi
            fi
        fi
    fi
    check=$(pm2 --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Installing PM2"
        npm install pm2 -g && pm2 install pm2-logrotate
        pm2 startup
    fi
    # Check again
    check=$(pm2 --version)
    if [ "$check" == "" ]; then
        "echo" "-e" "Error: pm2 is not found!"
        "echo" "-e" "help: an installation guide is available at https://pm2.keymetrics.io/docs/usage/quick-start/"
        exit 1
    fi
    mkdir -p $installPath
    cd $installPath
    git clone https://github.com/louislam/uptime-kuma.git .
    npm run setup
    pm2 start server/server.js --name uptime-kuma -- --port=$port
else
    defaultVolume="uptime-kuma"
    check=$(docker -v)
    if [ "$check" == "" ]; then
        "echo" "-e" "Error: docker is not found!"
        "echo" "-e" "help: an installation guide is available at https://docs.docker.com/desktop/"
        exit 1
    fi
    check=$(docker info)
    if [[ "$check" == *"Is the docker daemon running"* ]]; then
        "echo" "Error: docker is not running"
        "echo" "help: a troubleshooting guide is available at https://docs.docker.com/config/daemon/troubleshoot/"
        "exit" "1"
    fi
    if [ "$3" != "" ]; then
        port="$3"
    else
        "read" "-p" "Expose Port [$defaultPort]: " "port"
        if [ "$port" == "" ]; then
            port="$defaultPort"
        fi
    fi
    if [ "$2" != "" ]; then
        volume="$2"
    else
        "read" "-p" "Volume Name [$defaultVolume]: " "volume"
        if [ "$volume" == "" ]; then
            volume="$defaultVolume"
        fi
    fi
    "echo" "-e" "Port: $port"
    "echo" "-e" "Volume: $volume"
    docker volume create $volume
    docker run -d --restart=always -p $port:3001 -v $volume:/app/data --name uptime-kuma louislam/uptime-kuma:1
fi
"echo" "-e" "http://localhost:$port"
(File diff suppressed because it is too large.)
(Image changed: 893 B before, 1.1 KiB after.)
@@ -1,88 +0,0 @@
const https = require("https");
const http = require("http");
const CacheableLookup = require("cacheable-lookup");
const { Settings } = require("./settings");
const { log } = require("../src/util");

class CacheableDnsHttpAgent {

    static cacheable = new CacheableLookup();

    static httpAgentList = {};
    static httpsAgentList = {};

    static enable = false;

    /**
     * Register/Disable cacheable to global agents
     * @returns {void}
     */
    static async update() {
        log.debug("CacheableDnsHttpAgent", "update");
        let isEnable = await Settings.get("dnsCache");

        if (isEnable !== this.enable) {
            log.debug("CacheableDnsHttpAgent", "value changed");

            if (isEnable) {
                log.debug("CacheableDnsHttpAgent", "enable");
                this.cacheable.install(http.globalAgent);
                this.cacheable.install(https.globalAgent);
            } else {
                log.debug("CacheableDnsHttpAgent", "disable");
                this.cacheable.uninstall(http.globalAgent);
                this.cacheable.uninstall(https.globalAgent);
            }
        }

        this.enable = isEnable;
    }

    /**
     * Attach cacheable to HTTP agent
     * @param {http.Agent} agent Agent to install
     * @returns {void}
     */
    static install(agent) {
        this.cacheable.install(agent);
    }

    /**
     * @param {https.AgentOptions} agentOptions Options to pass to HTTPS agent
     * @returns {https.Agent} The new HTTPS agent
     */
    static getHttpsAgent(agentOptions) {
        if (!this.enable) {
            return new https.Agent(agentOptions);
        }

        let key = JSON.stringify(agentOptions);
        if (!(key in this.httpsAgentList)) {
            this.httpsAgentList[key] = new https.Agent(agentOptions);
            this.cacheable.install(this.httpsAgentList[key]);
        }
        return this.httpsAgentList[key];
    }

    /**
     * @param {http.AgentOptions} agentOptions Options to pass to the HTTP agent
     * @returns {https.Agents} The new HTTP agent
     */
    static getHttpAgent(agentOptions) {
        if (!this.enable) {
            return new http.Agent(agentOptions);
        }

        let key = JSON.stringify(agentOptions);
        if (!(key in this.httpAgentList)) {
            this.httpAgentList[key] = new http.Agent(agentOptions);
            this.cacheable.install(this.httpAgentList[key]);
        }
        return this.httpAgentList[key];
    }

}

module.exports = {
    CacheableDnsHttpAgent,
};
@@ -1,29 +1,46 @@
+const isFreeBSD = /^freebsd/.test(process.platform);
+
 // Interop with browser
 const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
-const demoMode = args["demo"] || false;
 
-const badgeConstants = {
-    naColor: "#999",
-    defaultUpColor: "#66c20a",
-    defaultWarnColor: "#eed202",
-    defaultDownColor: "#c2290a",
-    defaultPendingColor: "#f8a306",
-    defaultMaintenanceColor: "#1747f5",
-    defaultPingColor: "blue", // as defined by badge-maker / shields.io
-    defaultStyle: "flat",
-    defaultPingValueSuffix: "ms",
-    defaultPingLabelSuffix: "h",
-    defaultUptimeValueSuffix: "%",
-    defaultUptimeLabelSuffix: "h",
-    defaultCertExpValueSuffix: " days",
-    defaultCertExpLabelSuffix: "h",
-    // Values Come From Default Notification Times
-    defaultCertExpireWarnDays: "14",
-    defaultCertExpireDownDays: "7"
-};
+// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
+// Dual-stack support for (::)
+// Also read HOST if not FreeBSD, as HOST is a system environment variable in FreeBSD
+let hostEnv = isFreeBSD ? null : process.env.HOST;
+const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
+
+const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
+    .map(portValue => parseInt(portValue))
+    .find(portValue => !isNaN(portValue));
+
+const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
+const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
+const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
+
+const isSSL = sslKey && sslCert;
+
+/**
+ * Get the local WebSocket URL
+ * @returns {string} The local WebSocket URL
+ */
+function getLocalWebSocketURL() {
+    const protocol = isSSL ? "wss" : "ws";
+    const host = hostname || "localhost";
+    return `${protocol}://${host}:${port}`;
+}
+
+const localWebSocketURL = getLocalWebSocketURL();
+
+const demoMode = args["demo"] || false;
 
 module.exports = {
     args,
+    hostname,
+    port,
+    sslKey,
+    sslCert,
+    sslKeyPassphrase,
+    isSSL,
+    localWebSocketURL,
     demoMode,
-    badgeConstants,
 };
@@ -0,0 +1,17 @@
const { BeanModel } = require("redbean-node/dist/bean-model");

class RemoteBrowser extends BeanModel {
    /**
     * Returns an object that ready to parse to JSON
     * @returns {object} Object ready to parse
     */
    toJSON() {
        return {
            id: this.id,
            url: this.url,
            name: this.name,
        };
    }
}

module.exports = RemoteBrowser;
@@ -0,0 +1,63 @@
const { MonitorType } = require("./monitor-type");
const { UP } = require("../../src/util");
const { MongoClient } = require("mongodb");
const jsonata = require("jsonata");

class MongodbMonitorType extends MonitorType {
    name = "mongodb";

    /**
     * @inheritdoc
     */
    async check(monitor, heartbeat, _server) {
        let command = { "ping": 1 };
        if (monitor.databaseQuery) {
            command = JSON.parse(monitor.databaseQuery);
        }

        let result = await this.runMongodbCommand(monitor.databaseConnectionString, command);

        if (result["ok"] !== 1) {
            throw new Error("MongoDB command failed");
        } else {
            heartbeat.msg = "Command executed successfully";
        }

        if (monitor.jsonPath) {
            let expression = jsonata(monitor.jsonPath);
            result = await expression.evaluate(result);
            if (result) {
                heartbeat.msg = "Command executed successfully and the jsonata expression produces a result.";
            } else {
                throw new Error("Queried value not found.");
            }
        }

        if (monitor.expectedValue) {
            if (result.toString() === monitor.expectedValue) {
                heartbeat.msg = "Command executed successfully and expected value was found";
            } else {
                throw new Error("Query executed, but value is not equal to expected value, value was: [" + JSON.stringify(result) + "]");
            }
        }

        heartbeat.status = UP;
    }

    /**
     * Connect to and run MongoDB command on a MongoDB database
     * @param {string} connectionString The database connection string
     * @param {object} command MongoDB command to run on the database
     * @returns {Promise<(string[] | object[] | object)>} Response from server
     */
    async runMongodbCommand(connectionString, command) {
        let client = await MongoClient.connect(connectionString);
        let result = await client.db().command(command);
        await client.close();
        return result;
    }
}

module.exports = {
    MongodbMonitorType,
};
@@ -0,0 +1,117 @@
const { MonitorType } = require("./monitor-type");
const { log, UP } = require("../../src/util");
const mqtt = require("mqtt");
const jsonata = require("jsonata");

class MqttMonitorType extends MonitorType {
    name = "mqtt";

    /**
     * @inheritdoc
     */
    async check(monitor, heartbeat, server) {
        const receivedMessage = await this.mqttAsync(monitor.hostname, monitor.mqttTopic, {
            port: monitor.port,
            username: monitor.mqttUsername,
            password: monitor.mqttPassword,
            interval: monitor.interval,
        });

        if (monitor.mqttCheckType == null || monitor.mqttCheckType === "") {
            // use old default
            monitor.mqttCheckType = "keyword";
        }

        if (monitor.mqttCheckType === "keyword") {
            if (receivedMessage != null && receivedMessage.includes(monitor.mqttSuccessMessage)) {
                heartbeat.msg = `Topic: ${monitor.mqttTopic}; Message: ${receivedMessage}`;
                heartbeat.status = UP;
            } else {
                throw Error(`Message Mismatch - Topic: ${monitor.mqttTopic}; Message: ${receivedMessage}`);
            }
        } else if (monitor.mqttCheckType === "json-query") {
            const parsedMessage = JSON.parse(receivedMessage);

            let expression = jsonata(monitor.jsonPath);

            let result = await expression.evaluate(parsedMessage);

            if (result?.toString() === monitor.expectedValue) {
                heartbeat.msg = "Message received, expected value is found";
                heartbeat.status = UP;
            } else {
                throw new Error("Message received but value is not equal to expected value, value was: [" + result + "]");
            }
        } else {
            throw Error("Unknown MQTT Check Type");
        }
    }

    /**
     * Connect to MQTT Broker, subscribe to topic and receive message as String
     * @param {string} hostname Hostname / address of machine to test
     * @param {string} topic MQTT topic
     * @param {object} options MQTT options. Contains port, username,
     * password and interval (interval defaults to 20)
     * @returns {Promise<string>} Received MQTT message
     */
    mqttAsync(hostname, topic, options = {}) {
        return new Promise((resolve, reject) => {
            const { port, username, password, interval = 20 } = options;

            // Adds MQTT protocol to the hostname if not already present
            if (!/^(?:http|mqtt|ws)s?:\/\//.test(hostname)) {
                hostname = "mqtt://" + hostname;
            }

            const timeoutID = setTimeout(() => {
                log.debug("mqtt", "MQTT timeout triggered");
                client.end();
                reject(new Error("Timeout, Message not received"));
            }, interval * 1000 * 0.8);

            const mqttUrl = `${hostname}:${port}`;

            log.debug("mqtt", `MQTT connecting to ${mqttUrl}`);

            let client = mqtt.connect(mqttUrl, {
                username,
                password,
                clientId: "uptime-kuma_" + Math.random().toString(16).substr(2, 8)
            });

            client.on("connect", () => {
                log.debug("mqtt", "MQTT connected");

                try {
                    client.subscribe(topic, () => {
                        log.debug("mqtt", "MQTT subscribed to topic");
                    });
                } catch (e) {
                    client.end();
                    clearTimeout(timeoutID);
                    reject(new Error("Cannot subscribe topic"));
                }
            });

            client.on("error", (error) => {
                client.end();
                clearTimeout(timeoutID);
                reject(error);
            });

            client.on("message", (messageTopic, message) => {
                if (messageTopic === topic) {
                    client.end();
                    clearTimeout(timeoutID);
                    resolve(message.toString("utf8"));
                }
            });

        });
    }
}

module.exports = {
    MqttMonitorType,
};
@@ -0,0 +1,31 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");
const { UP } = require("../../src/util");

class Bitrix24 extends NotificationProvider {
    name = "Bitrix24";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";

        try {
            const params = {
                user_id: notification.bitrix24UserID,
                message: "[B]Uptime Kuma[/B]",
                "ATTACH[COLOR]": (heartbeatJSON ?? {})["status"] === UP ? "#b73419" : "#67b518",
                "ATTACH[BLOCKS][0][MESSAGE]": msg
            };

            await axios.get(`${notification.bitrix24WebhookURL}/im.notify.system.add.json`, { params });
            return okMsg;

        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Bitrix24;
@@ -0,0 +1,23 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class CallMeBot extends NotificationProvider {
    name = "CallMeBot";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";
        try {
            const url = new URL(notification.callMeBotEndpoint);
            url.searchParams.set("text", msg);
            await axios.get(url.toString());
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = CallMeBot;
@@ -0,0 +1,39 @@
const NotificationProvider = require("./notification-provider");
const axios = require("axios");

class Cellsynt extends NotificationProvider {
    name = "Cellsynt";

    /**
     * @inheritdoc
     */
    async send(notification, msg, monitorJSON = null, heartbeatJSON = null) {
        const okMsg = "Sent Successfully.";
        const data = {
            // docs at https://www.cellsynt.com/en/sms/api-integration
            params: {
                "username": notification.cellsyntLogin,
                "password": notification.cellsyntPassword,
                "destination": notification.cellsyntDestination,
                "text": msg.replace(/[^\x00-\x7F]/g, ""),
                "originatortype": notification.cellsyntOriginatortype,
                "originator": notification.cellsyntOriginator,
                "allowconcat": notification.cellsyntAllowLongSMS ? 6 : 1
            }
        };
        try {
            const resp = await axios.post("https://se-1.cellsynt.net/sms.php", null, data);
            if (resp.data == null ) {
                throw new Error("Could not connect to Cellsynt, please try again.");
            } else if (resp.data.includes("Error:")) {
                resp.data = resp.data.replaceAll("Error:", "");
                throw new Error(resp.data);
            }
            return okMsg;
        } catch (error) {
            this.throwGeneralAxiosError(error);
        }
    }
}

module.exports = Cellsynt;
Some files were not shown because too many files have changed in this diff.