chore: enable formatting over the entire codebase in CI (#6655)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
Frank Elsinga
2026-01-09 02:10:36 +01:00
committed by GitHub
parent 6658f2ce41
commit 0f61d7ee1b
422 changed files with 30899 additions and 27379 deletions

View File

@@ -1,9 +1,5 @@
module.exports = {
ignorePatterns: [
"test/*.js",
"server/modules/*",
"src/util.js"
],
ignorePatterns: ["test/*.js", "server/modules/*", "src/util.js"],
root: true,
env: {
browser: true,
@@ -23,94 +19,93 @@ module.exports = {
sourceType: "module",
requireConfigFile: false,
},
plugins: [
"jsdoc",
"@typescript-eslint",
],
plugins: ["jsdoc", "@typescript-eslint"],
rules: {
"yoda": "error",
eqeqeq: [ "warn", "smart" ],
"camelcase": [ "warn", {
"properties": "never",
"ignoreImports": true
}],
"no-unused-vars": [ "warn", {
"args": "none"
}],
yoda: "error",
eqeqeq: ["warn", "smart"],
camelcase: [
"warn",
{
properties: "never",
ignoreImports: true,
},
],
"no-unused-vars": [
"warn",
{
args: "none",
},
],
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"vue/multi-word-component-names": "off",
"curly": "error",
curly: "error",
"no-var": "error",
"no-throw-literal": "error",
"no-constant-condition": [ "error", {
"checkLoops": false,
}],
"no-constant-condition": [
"error",
{
checkLoops: false,
},
],
//"no-console": "warn",
"no-extra-boolean-cast": "off",
"no-unneeded-ternary": "error",
//"prefer-template": "error",
"no-empty": [ "error", {
"allowEmptyCatch": true
}],
"no-empty": [
"error",
{
allowEmptyCatch: true,
},
],
"no-control-regex": "off",
"one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }],
"one-var": ["error", "never"],
"max-statements-per-line": ["error", { max: 1 }],
"jsdoc/check-tag-names": [
"error",
{
"definedTags": [ "link" ]
}
definedTags: ["link"],
},
],
"jsdoc/no-undefined-types": "off",
"jsdoc/no-defaults": [
"error",
{ "noOptionalParamNames": true }
],
"jsdoc/no-defaults": ["error", { noOptionalParamNames: true }],
"jsdoc/require-throws": "warn",
"jsdoc/require-jsdoc": [
"error",
{
"require": {
"FunctionDeclaration": true,
"MethodDefinition": true,
}
}
require: {
FunctionDeclaration: true,
MethodDefinition: true,
},
},
],
"jsdoc/no-blank-block-descriptions": "error",
"jsdoc/require-returns-description": "warn",
"jsdoc/require-returns-check": [
"error",
{ "reportMissingReturnForUndefinedTypes": false }
],
"jsdoc/require-returns-check": ["error", { reportMissingReturnForUndefinedTypes: false }],
"jsdoc/require-returns": [
"warn",
{
"forceRequireReturn": true,
"forceReturnsWithAsync": true
}
forceRequireReturn: true,
forceReturnsWithAsync: true,
},
],
"jsdoc/require-param-type": "warn",
"jsdoc/require-param-description": "warn"
"jsdoc/require-param-description": "warn",
},
"overrides": [
overrides: [
// Override for TypeScript
{
"files": [
"**/*.ts",
],
extends: [
"plugin:@typescript-eslint/recommended",
],
"rules": {
files: ["**/*.ts"],
extends: ["plugin:@typescript-eslint/recommended"],
rules: {
"jsdoc/require-returns-type": "off",
"jsdoc/require-param-type": "off",
"@typescript-eslint/no-explicit-any": "off",
"prefer-const": "off",
}
}
]
},
},
],
};

View File

@@ -38,9 +38,8 @@ jobs:
run: npm run lint-fix:style
continue-on-error: true
# TODO: disabled until we have agreed that this is the formatting that we want to enforce
# - name: Auto-format code with Prettier
# run: npm run fmt
# continue-on-error: true
- name: Auto-format code with Prettier
run: npm run fmt
continue-on-error: true
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

View File

@@ -1,38 +1,2 @@
# Dependencies
node_modules/
# Build output
dist/
build/
# Data directories
data/
# Test output
test-results/
playwright-report/
private/
# Logs
*.log
npm-debug.log*
# OS files
.DS_Store
Thumbs.db
# IDE
.vscode/
.idea/
# Lock files
package-lock.json
pnpm-lock.yaml
yarn.lock
# Generated files
*.min.js
*.min.css
# Docker
docker/
# language files
src/lang/*.json

View File

@@ -54,8 +54,7 @@ to review the appropriate one for your contribution.
[**PLEASE SEE OUR SECURITY POLICY.**](SECURITY.md)
[advisory]: https://github.com/louislam/uptime-kuma/security/advisories/new
[issue]:
https://github.com/louislam/uptime-kuma/issues/new?template=security_issue.yml
[issue]: https://github.com/louislam/uptime-kuma/issues/new?template=security_issue.yml
</p>
</details>
@@ -65,7 +64,6 @@ to review the appropriate one for your contribution.
If you come across a bug and think you can solve it, we appreciate your work.
Please make sure that you follow these rules:
- keep the PR as small as possible, fix only one thing at a time => keeping it
reviewable
- test that your code does what you claim it does.
@@ -84,7 +82,6 @@ to review the appropriate one for your contribution.
We use Weblate to localise this project into many languages. If you want to help translate Uptime Kuma into your language, please see [these instructions on how to translate using Weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
There are some cases where a change cannot be done directly in Weblate and requires a PR:
- A text may not yet be localisable. In this case, **adding a new language key** via `{{ $t("Translation key") }}` or [`<i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html) might be necessary.
- Language keys need to be **added to `en.json`** to appear in Weblate. If this has not been done, a PR is appreciated.
- **Adding a new language** requires creating a new file. See [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
@@ -98,7 +95,6 @@ to review the appropriate one for your contribution.
<p>
To set up a new notification provider these files need to be modified/created:
- `server/notification-providers/PROVIDER_NAME.js` is where the heart of the
notification provider lives.
@@ -135,7 +131,6 @@ to review the appropriate one for your contribution.
translations (`{{ $t("Translation key") }}`,
[`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html))
in `src/lang/en.json` to enable our translators to translate this
- `src/components/notifications/index.js` is where the frontend of the
provider needs to be registered. _If you have an idea how we can skip this
step, we would love to hear about it ^^_
@@ -147,7 +142,6 @@ to review the appropriate one for your contribution.
To make sure you have tested the notification provider, please include
screenshots of the following events in the pull-request description:
- `UP`/`DOWN`
- Certificate Expiry via <https://expired.badssl.com/>
- Domain Expiry via <https://google.com/> and a larger time set
@@ -159,7 +153,7 @@ to review the appropriate one for your contribution.
```md
| Event | Before | After |
|--------------------|-----------------------|----------------------|
| ------------------ | --------------------- | -------------------- |
| `UP` | ![Before](image-link) | ![After](image-link) |
| `DOWN` | ![Before](image-link) | ![After](image-link) |
| Certificate-expiry | ![Before](image-link) | ![After](image-link) |
@@ -177,7 +171,6 @@ to review the appropriate one for your contribution.
<p>
To set up a new monitor type these files need to be modified/created:
- `server/monitor-types/MONITORING_TYPE.js` is the core of each monitor.
The `async check(...)`-function should:
- in the happy-path: set `heartbeat.msg` to a successful message and set `heartbeat.status = UP`
@@ -220,7 +213,6 @@ to review the appropriate one for your contribution.
<p>
Contributing is easy and fun. We will guide you through the process:
1. **Fork** the [Uptime-Kuma repository](https://github.com/louislam/uptime-kuma/) and **clone** it to your local machine.
2. **Create a new branch** for your changes (e.g., `signal-notification-provider`).
3. **Make your changes** and **commit** them with a clear message.
@@ -235,7 +227,6 @@ to review the appropriate one for your contribution.
A PR should remain in **draft status** until all tasks are completed.
Only change the status to **Ready for Review** when:
- You have implemented all planned changes.
- Your code is fully tested and ready for review.
- You have updated or created the necessary tests.
@@ -248,7 +239,6 @@ to review the appropriate one for your contribution.
- Merging multiple issues by a huge PR is more difficult to review and causes
conflicts with other PRs. Please
- (if possible) **create one PR for one issue** or
- (if not possible) **explain which issues a PR addresses and why this PR
should not be broken apart**
@@ -269,6 +259,7 @@ to review the appropriate one for your contribution.
### Continuous Integration
All pull requests must pass our continuous integration checks. These checks include:
- **Linting**: We use ESLint and Stylelint for code quality checks. You can run the linter locally with `npm run lint`.
- **Formatting**: We use Prettier for code formatting. You can format your code with `npm run fmt` (or CI will do this for you)
- **Testing**: We use Playwright for end-to-end tests and have a suite of backend tests. You can run the tests locally with `npm test`.
@@ -297,13 +288,11 @@ you can finally start the app. The goal is to make the Uptime Kuma installation
as easy as installing a mobile app.
- Easy to install for non-Docker users
- no native build dependency is needed (for `x86_64`/`armv7`/`arm64`)
- no extra configuration and
- no extra effort required to get it running
- Single container for Docker users
- no complex docker-compose file
- mapping the volume and exposing the port should be the only requirements
@@ -480,11 +469,9 @@ We have a few procedures we follow. These are documented here:
- <details><summary><b>Set up a Docker Builder</b> (click to expand)</summary>
<p>
- amd64, armv7 using local.
- arm64 using remote arm64 cpu, as the emulator is too slow and can no longer
pass the `npm ci` command.
1. Add the public key to the remote server.
2. Add the remote context. The remote machine must be arm64 and have
Docker CE installed.
@@ -515,7 +502,6 @@ We have a few procedures we follow. These are documented here:
- <details><summary><b>Release</b> (click to expand)</summary>
<p>
1. Draft a release note
2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it:
@@ -528,7 +514,6 @@ We have a few procedures we follow. These are documented here:
9. Deploy to the demo server: `npm run deploy-demo-server`
These Items need to be checked:
- [ ] Check all tags are fine on
<https://hub.docker.com/r/louislam/uptime-kuma/tags>
- [ ] Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 /
@@ -540,7 +525,6 @@ We have a few procedures we follow. These are documented here:
- <details><summary><b>Release Beta</b> (click to expand)</summary>
<p>
1. Draft a release note, check `This is a pre-release`
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`

View File

@@ -45,6 +45,7 @@ cd uptime-kuma
curl -o compose.yaml https://raw.githubusercontent.com/louislam/uptime-kuma/master/compose.yaml
docker compose up -d
```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
> [!WARNING]
@@ -55,6 +56,7 @@ Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001
```bash
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:2
```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
If you want to limit exposure to localhost only:
@@ -63,8 +65,6 @@ If you want to limit exposure to localhost only:
docker run ... -p 127.0.0.1:3001:3001 ...
```
### 💪🏻 Non-Docker
Requirements:
@@ -93,6 +93,7 @@ npm install pm2 -g && pm2 install pm2-logrotate
# Start Server
pm2 start server/server.js --name uptime-kuma
```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
More useful PM2 Commands

View File

@@ -10,7 +10,7 @@
- Do not report any upstream dependency issues / scan result by any tools. It will be closed immediately without explanations. Unless you have PoC to prove that the upstream issue affected Uptime Kuma.
- Do not use the public issue tracker or discuss it in public as it will cause
more damage.
more damage.
## Do you accept other 3rd-party bug bounty platforms?

View File

@@ -22,10 +22,11 @@ export default defineConfig({
// Reporter to use
reporter: [
[
"html", {
"html",
{
outputFolder: "../private/playwright-report",
open: "never",
}
},
],
],
@@ -47,7 +48,7 @@ export default defineConfig({
{
name: "specs",
use: { ...devices["Desktop Chrome"] },
dependencies: [ "run-once setup" ],
dependencies: ["run-once setup"],
},
/*
{

View File

@@ -15,13 +15,13 @@ export default defineConfig({
port: 3000,
},
define: {
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
FRONTEND_VERSION: JSON.stringify(process.env.npm_package_version),
"process.env": {},
},
plugins: [
vue(),
visualizer({
filename: "tmp/dist-stats.html"
filename: "tmp/dist-stats.html",
}),
viteCompression({
algorithm: "gzip",
@@ -40,21 +40,19 @@ export default defineConfig({
],
css: {
postcss: {
"parser": postCssScss,
"map": false,
"plugins": [ postcssRTLCSS ]
}
parser: postCssScss,
map: false,
plugins: [postcssRTLCSS],
},
},
build: {
commonjsOptions: {
include: [ /.js$/ ],
include: [/.js$/],
},
rollupOptions: {
output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {
}
}
manualChunks(id, { getModuleInfo, getModuleIds }) {},
},
},
},
}
});

View File

@@ -67,10 +67,7 @@ async function createTables() {
table.increments("id");
table.string("name", 150);
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("user_id").unsigned().references("id").inTable("user").onDelete("SET NULL").onUpdate("CASCADE");
table.integer("interval").notNullable().defaultTo(20);
table.text("url");
table.string("type", 20);
@@ -83,7 +80,7 @@ async function createTables() {
table.boolean("ignore_tls").notNullable().defaultTo(false);
table.boolean("upside_down").notNullable().defaultTo(false);
table.integer("maxredirects").notNullable().defaultTo(10);
table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
table.text("accepted_statuscodes_json").notNullable().defaultTo('["200-299"]');
table.string("dns_resolve_type", 5);
table.string("dns_resolve_server", 255);
table.string("dns_last_result", 255);
@@ -94,11 +91,9 @@ async function createTables() {
table.text("headers").defaultTo(null);
table.text("basic_auth_user").defaultTo(null);
table.text("basic_auth_pass").defaultTo(null);
table.integer("docker_host").unsigned()
.references("id").inTable("docker_host");
table.integer("docker_host").unsigned().references("id").inTable("docker_host");
table.string("docker_container", 255);
table.integer("proxy_id").unsigned()
.references("id").inTable("proxy");
table.integer("proxy_id").unsigned().references("id").inTable("proxy");
table.boolean("expiry_notification").defaultTo(true);
table.text("mqtt_topic");
table.string("mqtt_success_message", 255);
@@ -130,8 +125,12 @@ async function createTables() {
await knex.schema.createTable("heartbeat", (table) => {
table.increments("id");
table.boolean("important").notNullable().defaultTo(false);
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.smallint("status").notNullable();
@@ -143,9 +142,9 @@ async function createTables() {
table.integer("down_count").notNullable().defaultTo(0);
table.index("important");
table.index([ "monitor_id", "time" ], "monitor_time_index");
table.index(["monitor_id", "time"], "monitor_time_index");
table.index("monitor_id");
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
table.index(["monitor_id", "important", "time"], "monitor_important_time_index");
});
// incident
@@ -166,10 +165,7 @@ async function createTables() {
table.increments("id");
table.string("title", 150).notNullable();
table.text("description").notNullable();
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("user_id").unsigned().references("id").inTable("user").onDelete("SET NULL").onUpdate("CASCADE");
table.boolean("active").notNullable().defaultTo(true);
table.string("strategy", 50).notNullable().defaultTo("single");
table.datetime("start_date");
@@ -181,7 +177,7 @@ async function createTables() {
table.integer("interval_day");
table.index("active");
table.index([ "strategy", "active" ], "manual_active");
table.index(["strategy", "active"], "manual_active");
table.index("user_id", "maintenance_user_id");
});
@@ -209,13 +205,21 @@ async function createTables() {
await knex.schema.createTable("maintenance_status_page", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned().notNullable()
.references("id").inTable("status_page")
table
.integer("status_page_id")
.unsigned()
.notNullable()
.references("id")
.inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
table
.integer("maintenance_id")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
});
@@ -223,8 +227,12 @@ async function createTables() {
// maintenance_timeslot
await knex.schema.createTable("maintenance_timeslot", (table) => {
table.increments("id");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
table
.integer("maintenance_id")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.datetime("start_date").notNullable();
@@ -232,35 +240,51 @@ async function createTables() {
table.boolean("generated_next").defaultTo(false);
table.index("maintenance_id");
table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
table.index(["maintenance_id", "start_date", "end_date"], "active_timeslot_index");
table.index("generated_next", "generated_next_index");
});
// monitor_group
await knex.schema.createTable("monitor_group", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("group_id").unsigned().notNullable()
.references("id").inTable("group")
table
.integer("group_id")
.unsigned()
.notNullable()
.references("id")
.inTable("group")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("weight").notNullable().defaultTo(1000);
table.boolean("send_url").notNullable().defaultTo(false);
table.index([ "monitor_id", "group_id" ], "fk");
table.index(["monitor_id", "group_id"], "fk");
});
// monitor_maintenance
await knex.schema.createTable("monitor_maintenance", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
table
.integer("maintenance_id")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
@@ -281,16 +305,24 @@ async function createTables() {
// monitor_notification
await knex.schema.createTable("monitor_notification", (table) => {
table.increments("id").unsigned(); // TODO: no auto increment????
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("notification_id").unsigned().notNullable()
.references("id").inTable("notification")
table
.integer("notification_id")
.unsigned()
.notNullable()
.references("id")
.inTable("notification")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
table.index(["monitor_id", "notification_id"], "monitor_notification_index");
});
// tag
@@ -304,12 +336,20 @@ async function createTables() {
// monitor_tag
await knex.schema.createTable("monitor_tag", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("tag_id").unsigned().notNullable()
.references("id").inTable("tag")
table
.integer("tag_id")
.unsigned()
.notNullable()
.references("id")
.inTable("tag")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("value");
@@ -318,8 +358,12 @@ async function createTables() {
// monitor_tls_info
await knex.schema.createTable("monitor_tls_info", (table) => {
table.increments("id");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("info_json");
@@ -331,8 +375,8 @@ async function createTables() {
table.string("type", 50).notNullable();
table.integer("monitor_id").unsigned().notNullable();
table.integer("days").notNullable();
table.unique([ "type", "monitor_id", "days" ]);
table.index([ "type", "monitor_id", "days" ], "good_index");
table.unique(["type", "monitor_id", "days"]);
table.index(["type", "monitor_id", "days"], "good_index");
});
// setting
@@ -346,8 +390,11 @@ async function createTables() {
// status_page_cname
await knex.schema.createTable("status_page_cname", (table) => {
table.increments("id");
table.integer("status_page_id").unsigned()
.references("id").inTable("status_page")
table
.integer("status_page_id")
.unsigned()
.references("id")
.inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.string("domain").notNullable().unique().collate("utf8_general_ci");
@@ -396,8 +443,12 @@ async function createTables() {
table.increments("id").primary();
table.string("key", 255).notNullable();
table.string("name", 255).notNullable();
table.integer("user_id").unsigned().notNullable()
.references("id").inTable("user")
table
.integer("user_id")
.unsigned()
.notNullable()
.references("id")
.inTable("user")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
@@ -430,9 +481,7 @@ async function createTables() {
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
*/
await knex.schema
.dropTableIfExists("maintenance_timeslot")
.table("maintenance", function (table) {
await knex.schema.dropTableIfExists("maintenance_timeslot").table("maintenance", function (table) {
table.text("cron");
table.string("timezone", 255);
table.integer("duration");
@@ -444,10 +493,7 @@ async function createTables() {
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
*/
await knex.schema.table("monitor", function (table) {
table.integer("parent").unsigned()
.references("id").inTable("monitor")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("parent").unsigned().references("id").inTable("monitor").onDelete("SET NULL").onUpdate("CASCADE");
});
/*

View File

@@ -3,39 +3,41 @@ exports.up = function (knex) {
.createTable("stat_minutely", function (table) {
table.increments("id");
table.comment("This table contains the minutely aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest minute");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest minute");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
table.unique(["monitor_id", "timestamp"]);
})
.createTable("stat_daily", function (table) {
table.increments("id");
table.comment("This table contains the daily aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest day");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest day");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
table.unique(["monitor_id", "timestamp"]);
});
};
exports.down = function (knex) {
return knex.schema
.dropTable("stat_minutely")
.dropTable("stat_daily");
return knex.schema.dropTable("stat_minutely").dropTable("stat_daily");
};

View File

@@ -1,16 +1,13 @@
exports.up = function (knex) {
// Add new column heartbeat.end_time
return knex.schema
.alterTable("heartbeat", function (table) {
return knex.schema.alterTable("heartbeat", function (table) {
table.datetime("end_time").nullable().defaultTo(null);
});
};
exports.down = function (knex) {
// Rename heartbeat.start_time to heartbeat.time
return knex.schema
.alterTable("heartbeat", function (table) {
return knex.schema.alterTable("heartbeat", function (table) {
table.dropColumn("end_time");
});
};

View File

@@ -1,15 +1,12 @@
exports.up = function (knex) {
// Add new column heartbeat.retries
return knex.schema
.alterTable("heartbeat", function (table) {
return knex.schema.alterTable("heartbeat", function (table) {
table.integer("retries").notNullable().defaultTo(0);
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("heartbeat", function (table) {
return knex.schema.alterTable("heartbeat", function (table) {
table.dropColumn("retries");
});
};

View File

@@ -1,16 +1,13 @@
exports.up = function (knex) {
// Add new column monitor.mqtt_check_type
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
});
};
exports.down = function (knex) {
// Drop column monitor.mqtt_check_type
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("mqtt_check_type");
});
};

View File

@@ -1,14 +1,12 @@
exports.up = function (knex) {
// update monitor.push_token to 32 length
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("push_token", 32).alter();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("push_token", 20).alter();
});
};

View File

@@ -5,9 +5,14 @@ exports.up = function (knex) {
table.string("name", 255).notNullable();
table.string("url", 255).notNullable();
table.integer("user_id").unsigned();
}).alterTable("monitor", function (table) {
})
.alterTable("monitor", function (table) {
// Add new column monitor.remote_browser
table.integer("remote_browser").nullable().defaultTo(null).unsigned()
table
.integer("remote_browser")
.nullable()
.defaultTo(null)
.unsigned()
.index()
.references("id")
.inTable("remote_browser");

View File

@@ -1,6 +1,5 @@
exports.up = function (knex) {
return knex.schema
.alterTable("status_page", function (table) {
return knex.schema.alterTable("status_page", function (table) {
table.integer("auto_refresh_interval").defaultTo(300).unsigned();
});
};

View File

@@ -1,14 +1,29 @@
exports.up = function (knex) {
return knex.schema
.alterTable("stat_daily", function (table) {
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table
.float("ping_min")
.notNullable()
.defaultTo(0)
.comment("Minimum ping during this period in milliseconds");
table
.float("ping_max")
.notNullable()
.defaultTo(0)
.comment("Maximum ping during this period in milliseconds");
})
.alterTable("stat_minutely", function (table) {
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table
.float("ping_min")
.notNullable()
.defaultTo(0)
.comment("Minimum ping during this period in milliseconds");
table
.float("ping_max")
.notNullable()
.defaultTo(0)
.comment("Maximum ping during this period in milliseconds");
});
};
exports.down = function (knex) {

View File

@@ -1,26 +1,26 @@
exports.up = function (knex) {
return knex.schema
.createTable("stat_hourly", function (table) {
return knex.schema.createTable("stat_hourly", function (table) {
table.increments("id");
table.comment("This table contains the hourly aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest hour");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest hour");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique([ "monitor_id", "timestamp" ]);
table.unique(["monitor_id", "timestamp"]);
});
};
exports.down = function (knex) {
return knex.schema
.dropTable("stat_hourly");
return knex.schema.dropTable("stat_hourly");
};

View File

@@ -9,7 +9,6 @@ exports.up = function (knex) {
.alterTable("stat_hourly", function (table) {
table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
});
};
exports.down = function (knex) {

View File

@@ -1,8 +1,7 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("snmp_oid").defaultTo(null);
table.enum("snmp_version", [ "1", "2c", "3" ]).defaultTo("2c");
table.enum("snmp_version", ["1", "2c", "3"]).defaultTo("2c");
table.string("json_path_operator").defaultTo(null);
});
};

View File

@@ -1,13 +1,11 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("cache_bust").notNullable().defaultTo(false);
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("cache_bust");
});
};

View File

@@ -1,6 +1,5 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.text("conditions").notNullable().defaultTo("[]");
});
};

View File

@@ -4,7 +4,6 @@ exports.up = function (knex) {
table.string("rabbitmq_username");
table.string("rabbitmq_password");
});
};
exports.down = function (knex) {
@@ -13,5 +12,4 @@ exports.down = function (knex) {
table.dropColumn("rabbitmq_username");
table.dropColumn("rabbitmq_password");
});
};

View File

@@ -1,7 +1,6 @@
// Update info_json column to LONGTEXT mainly for MariaDB
exports.up = function (knex) {
return knex.schema
.alterTable("monitor_tls_info", function (table) {
return knex.schema.alterTable("monitor_tls_info", function (table) {
table.text("info_json", "longtext").alter();
});
};

View File

@@ -1,6 +1,5 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("smtp_security").defaultTo(null);
});
};

View File

@@ -1,7 +1,6 @@
// Add websocket ignore headers and websocket subprotocol
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("ws_ignore_sec_websocket_accept_header").notNullable().defaultTo(false);
table.string("ws_subprotocol", 255).notNullable().defaultTo("");
});

View File

@@ -4,12 +4,12 @@ exports.up = function (knex) {
.alterTable("status_page", function (table) {
table.renameColumn("google_analytics_tag_id", "analytics_id");
table.string("analytics_script_url");
table.enu("analytics_type", [ "google", "umami", "plausible", "matomo" ]).defaultTo(null);
}).then(() => {
table.enu("analytics_type", ["google", "umami", "plausible", "matomo"]).defaultTo(null);
})
.then(() => {
// After a succesful migration, add google as default for previous pages
knex("status_page").whereNotNull("analytics_id").update({
"analytics_type": "google",
analytics_type: "google",
});
});
};

View File

@@ -5,18 +5,15 @@ ALTER TABLE monitor ADD ping_per_request_timeout INTEGER default 2 not null;
*/
exports.up = function (knex) {
// Add new columns to table monitor
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.integer("ping_count").defaultTo(1).notNullable();
table.boolean("ping_numeric").defaultTo(true).notNullable();
table.integer("ping_per_request_timeout").defaultTo(2).notNullable();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("ping_count");
table.dropColumn("ping_numeric");
table.dropColumn("ping_per_request_timeout");

View File

@@ -1,7 +1,6 @@
// Fix #5721: Change proxy port column type to integer to support larger port numbers
exports.up = function (knex) {
return knex.schema
.alterTable("proxy", function (table) {
return knex.schema.alterTable("proxy", function (table) {
table.integer("port").alter();
});
};

View File

@@ -1,7 +1,6 @@
// Add column custom_url to monitor_group table
exports.up = function (knex) {
return knex.schema
.alterTable("monitor_group", function (table) {
return knex.schema.alterTable("monitor_group", function (table) {
table.text("custom_url", "text");
});
};

View File

@@ -1,13 +1,11 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("ip_family").defaultTo(null);
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("ip_family");
});
};

View File

@@ -1,6 +1,5 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("manual_status").defaultTo(null);
});
};

View File

@@ -1,28 +1,27 @@
// Add column last_start_date to maintenance table
exports.up = async function (knex) {
await knex.schema
.alterTable("maintenance", function (table) {
await knex.schema.alterTable("maintenance", function (table) {
table.datetime("last_start_date");
});
// Perform migration for recurring-interval strategy
const recurringMaintenances = await knex("maintenance").where({
const recurringMaintenances = await knex("maintenance")
.where({
strategy: "recurring-interval",
cron: "* * * * *"
}).select("id", "start_time");
cron: "* * * * *",
})
.select("id", "start_time");
// eslint-disable-next-line camelcase
const maintenanceUpdates = recurringMaintenances.map(async ({ start_time, id }) => {
// eslint-disable-next-line camelcase
const [ hourStr, minuteStr ] = start_time.split(":");
const [hourStr, minuteStr] = start_time.split(":");
const hour = parseInt(hourStr, 10);
const minute = parseInt(minuteStr, 10);
const cron = `${minute} ${hour} * * *`;
await knex("maintenance")
.where({ id })
.update({ cron });
await knex("maintenance").where({ id }).update({ cron });
});
await Promise.all(maintenanceUpdates);
};

View File

@@ -1,7 +1,6 @@
// Fix: Change manual_status column type to smallint
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.smallint("manual_status").alter();
});
};

View File

@@ -1,6 +1,5 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("oauth_audience").nullable().defaultTo(null);
});
};

View File

@@ -1,15 +1,13 @@
exports.up = function (knex) {
// Add new column monitor.mqtt_websocket_path
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("mqtt_websocket_path", 255).nullable();
});
};
exports.down = function (knex) {
// Drop column monitor.mqtt_websocket_path
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("mqtt_websocket_path");
});
};

View File

@@ -1,6 +1,5 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
// Fix ip_family, change to varchar instead of boolean
// possible values are "ipv4" and "ipv6"
table.string("ip_family", 4).defaultTo(null).alter();
@@ -8,8 +7,7 @@ exports.up = function (knex) {
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
// Rollback to boolean
table.boolean("ip_family").defaultTo(null).alter();
});

View File

@@ -1,15 +1,13 @@
exports.up = function (knex) {
// Add new column status_page.show_only_last_heartbeat
return knex.schema
.alterTable("status_page", function (table) {
return knex.schema.alterTable("status_page", function (table) {
table.boolean("show_only_last_heartbeat").notNullable().defaultTo(false);
});
};
exports.down = function (knex) {
// Drop column status_page.show_only_last_heartbeat
return knex.schema
.alterTable("status_page", function (table) {
return knex.schema.alterTable("status_page", function (table) {
table.dropColumn("show_only_last_heartbeat");
});
};

View File

@@ -9,11 +9,11 @@ exports.up = async function (knex) {
// Create partial indexes with predicate
await knex.schema.alterTable("heartbeat", function (table) {
table.index([ "monitor_id", "time" ], "monitor_important_time_index", {
predicate: knex.whereRaw("important = 1")
table.index(["monitor_id", "time"], "monitor_important_time_index", {
predicate: knex.whereRaw("important = 1"),
});
table.index([ "important" ], "heartbeat_important_index", {
predicate: knex.whereRaw("important = 1")
table.index(["important"], "heartbeat_important_index", {
predicate: knex.whereRaw("important = 1"),
});
});
}
@@ -29,8 +29,8 @@ exports.down = async function (knex) {
await knex.raw("DROP INDEX IF EXISTS heartbeat_important_index");
await knex.schema.alterTable("heartbeat", function (table) {
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
table.index([ "important" ]);
table.index(["monitor_id", "important", "time"], "monitor_important_time_index");
table.index(["important"]);
});
}
// For MariaDB/MySQL: No changes

View File

@@ -1,14 +1,12 @@
// Change dns_last_result column from VARCHAR(255) to TEXT to handle longer DNS TXT records
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.text("dns_last_result").alter();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
return knex.schema.alterTable("monitor", function (table) {
table.string("dns_last_result", 255).alter();
});
};

View File

@@ -3,139 +3,139 @@
// Lookup table mapping v4 game IDs to v5 game IDs
const gameDig4to5IdMap = {
"americasarmypg": "aapg",
americasarmypg: "aapg",
"7d2d": "sdtd",
"as": "actionsource",
"ageofchivalry": "aoc",
"arkse": "ase",
"arcasimracing": "asr08",
"arma": "aaa",
"arma2oa": "a2oa",
"armacwa": "acwa",
"armar": "armaresistance",
"armare": "armareforger",
"armagetron": "armagetronadvanced",
"bat1944": "battalion1944",
"bf1942": "battlefield1942",
"bfv": "battlefieldvietnam",
"bf2": "battlefield2",
"bf2142": "battlefield2142",
"bfbc2": "bbc2",
"bf3": "battlefield3",
"bf4": "battlefield4",
"bfh": "battlefieldhardline",
"bd": "basedefense",
"bs": "bladesymphony",
"buildandshoot": "bas",
"cod4": "cod4mw",
"callofjuarez": "coj",
"chivalry": "cmw",
"commandos3": "c3db",
"cacrenegade": "cacr",
"contactjack": "contractjack",
"cs15": "counterstrike15",
"cs16": "counterstrike16",
"cs2": "counterstrike2",
"crossracing": "crce",
"darkesthour": "dhe4445",
"daysofwar": "dow",
"deadlydozenpt": "ddpt",
"dh2005": "deerhunter2005",
"dinodday": "ddd",
"dirttrackracing2": "dtr2",
"dmc": "deathmatchclassic",
"dnl": "dal",
"drakan": "dootf",
"dys": "dystopia",
"em": "empiresmod",
"empyrion": "egs",
"f12002": "formulaone2002",
"flashpointresistance": "ofr",
"fivem": "gta5f",
"forrest": "theforrest",
"graw": "tcgraw",
"graw2": "tcgraw2",
"giantscitizenkabuto": "gck",
"ges": "goldeneyesource",
"gore": "gus",
"hldm": "hld",
"hldms": "hlds",
"hlopfor": "hlof",
"hl2dm": "hl2d",
"hidden": "thehidden",
"had2": "hiddendangerous2",
"igi2": "i2cs",
"il2": "il2sturmovik",
"insurgencymic": "imic",
"isle": "theisle",
"jamesbondnightfire": "jb007n",
"jc2mp": "jc2m",
"jc3mp": "jc3m",
"kingpin": "kloc",
"kisspc": "kpctnc",
"kspdmp": "kspd",
"kzmod": "kreedzclimbing",
"left4dead": "l4d",
"left4dead2": "l4d2",
"m2mp": "m2m",
"mohsh": "mohaas",
"mohbt": "mohaab",
"mohab": "moha",
"moh2010": "moh",
"mohwf": "mohw",
"minecraftbe": "mbe",
"mtavc": "gtavcmta",
"mtasa": "gtasamta",
"ns": "naturalselection",
"ns2": "naturalselection2",
"nwn": "neverwinternights",
"nwn2": "neverwinternights2",
"nolf": "tonolf",
"nolf2": "nolf2asihw",
"pvkii": "pvak2",
"ps": "postscriptum",
"primalcarnage": "pce",
"pc": "projectcars",
"pc2": "projectcars2",
"prbf2": "prb2",
"przomboid": "projectzomboid",
"quake1": "quake",
"quake3": "q3a",
"ragdollkungfu": "rdkf",
"r6": "rainbowsix",
"r6roguespear": "rs2rs",
"r6ravenshield": "rs3rs",
"redorchestraost": "roo4145",
"redm": "rdr2r",
"riseofnations": "ron",
"rs2": "rs2v",
"samp": "gtasam",
"saomp": "gtasao",
"savage2": "s2ats",
"ss": "serioussam",
"ss2": "serioussam2",
"ship": "theship",
"sinep": "sinepisodes",
"sonsoftheforest": "sotf",
"swbf": "swb",
"swbf2": "swb2",
"swjk": "swjkja",
"swjk2": "swjk2jo",
"takeonhelicopters": "toh",
"tf2": "teamfortress2",
"terraria": "terrariatshock",
"tribes1": "t1s",
"ut": "unrealtournament",
"ut2003": "unrealtournament2003",
"ut2004": "unrealtournament2004",
"ut3": "unrealtournament3",
"v8supercar": "v8sc",
"vcmp": "vcm",
"vs": "vampireslayer",
"wheeloftime": "wot",
"wolfenstein2009": "wolfenstein",
"wolfensteinet": "wet",
"wurm": "wurmunlimited",
as: "actionsource",
ageofchivalry: "aoc",
arkse: "ase",
arcasimracing: "asr08",
arma: "aaa",
arma2oa: "a2oa",
armacwa: "acwa",
armar: "armaresistance",
armare: "armareforger",
armagetron: "armagetronadvanced",
bat1944: "battalion1944",
bf1942: "battlefield1942",
bfv: "battlefieldvietnam",
bf2: "battlefield2",
bf2142: "battlefield2142",
bfbc2: "bbc2",
bf3: "battlefield3",
bf4: "battlefield4",
bfh: "battlefieldhardline",
bd: "basedefense",
bs: "bladesymphony",
buildandshoot: "bas",
cod4: "cod4mw",
callofjuarez: "coj",
chivalry: "cmw",
commandos3: "c3db",
cacrenegade: "cacr",
contactjack: "contractjack",
cs15: "counterstrike15",
cs16: "counterstrike16",
cs2: "counterstrike2",
crossracing: "crce",
darkesthour: "dhe4445",
daysofwar: "dow",
deadlydozenpt: "ddpt",
dh2005: "deerhunter2005",
dinodday: "ddd",
dirttrackracing2: "dtr2",
dmc: "deathmatchclassic",
dnl: "dal",
drakan: "dootf",
dys: "dystopia",
em: "empiresmod",
empyrion: "egs",
f12002: "formulaone2002",
flashpointresistance: "ofr",
fivem: "gta5f",
forrest: "theforrest",
graw: "tcgraw",
graw2: "tcgraw2",
giantscitizenkabuto: "gck",
ges: "goldeneyesource",
gore: "gus",
hldm: "hld",
hldms: "hlds",
hlopfor: "hlof",
hl2dm: "hl2d",
hidden: "thehidden",
had2: "hiddendangerous2",
igi2: "i2cs",
il2: "il2sturmovik",
insurgencymic: "imic",
isle: "theisle",
jamesbondnightfire: "jb007n",
jc2mp: "jc2m",
jc3mp: "jc3m",
kingpin: "kloc",
kisspc: "kpctnc",
kspdmp: "kspd",
kzmod: "kreedzclimbing",
left4dead: "l4d",
left4dead2: "l4d2",
m2mp: "m2m",
mohsh: "mohaas",
mohbt: "mohaab",
mohab: "moha",
moh2010: "moh",
mohwf: "mohw",
minecraftbe: "mbe",
mtavc: "gtavcmta",
mtasa: "gtasamta",
ns: "naturalselection",
ns2: "naturalselection2",
nwn: "neverwinternights",
nwn2: "neverwinternights2",
nolf: "tonolf",
nolf2: "nolf2asihw",
pvkii: "pvak2",
ps: "postscriptum",
primalcarnage: "pce",
pc: "projectcars",
pc2: "projectcars2",
prbf2: "prb2",
przomboid: "projectzomboid",
quake1: "quake",
quake3: "q3a",
ragdollkungfu: "rdkf",
r6: "rainbowsix",
r6roguespear: "rs2rs",
r6ravenshield: "rs3rs",
redorchestraost: "roo4145",
redm: "rdr2r",
riseofnations: "ron",
rs2: "rs2v",
samp: "gtasam",
saomp: "gtasao",
savage2: "s2ats",
ss: "serioussam",
ss2: "serioussam2",
ship: "theship",
sinep: "sinepisodes",
sonsoftheforest: "sotf",
swbf: "swb",
swbf2: "swb2",
swjk: "swjkja",
swjk2: "swjk2jo",
takeonhelicopters: "toh",
tf2: "teamfortress2",
terraria: "terrariatshock",
tribes1: "t1s",
ut: "unrealtournament",
ut2003: "unrealtournament2003",
ut2004: "unrealtournament2004",
ut3: "unrealtournament3",
v8supercar: "v8sc",
vcmp: "vcm",
vs: "vampireslayer",
wheeloftime: "wot",
wolfenstein2009: "wolfenstein",
wolfensteinet: "wet",
wurm: "wurmunlimited",
};
/**
@@ -146,10 +146,7 @@ const gameDig4to5IdMap = {
exports.up = async function (knex) {
await knex.transaction(async (trx) => {
// Get all monitors that use the gamedig type
const monitors = await trx("monitor")
.select("id", "game")
.where("type", "gamedig")
.whereNotNull("game");
const monitors = await trx("monitor").select("id", "game").where("type", "gamedig").whereNotNull("game");
// Update each monitor with the new game ID if it needs migration
for (const monitor of monitors) {
@@ -157,9 +154,7 @@ exports.up = async function (knex) {
const newGameId = gameDig4to5IdMap[oldGameId];
if (newGameId) {
await trx("monitor")
.where("id", monitor.id)
.update({ game: newGameId });
await trx("monitor").where("id", monitor.id).update({ game: newGameId });
}
}
});
@@ -172,16 +167,11 @@ exports.up = async function (knex) {
*/
exports.down = async function (knex) {
// Create reverse mapping from the same LUT
const gameDig5to4IdMap = Object.fromEntries(
Object.entries(gameDig4to5IdMap).map(([ v4, v5 ]) => [ v5, v4 ])
);
const gameDig5to4IdMap = Object.fromEntries(Object.entries(gameDig4to5IdMap).map(([v4, v5]) => [v5, v4]));
await knex.transaction(async (trx) => {
// Get all monitors that use the gamedig type
const monitors = await trx("monitor")
.select("id", "game")
.where("type", "gamedig")
.whereNotNull("game");
const monitors = await trx("monitor").select("id", "game").where("type", "gamedig").whereNotNull("game");
// Revert each monitor back to the old game ID if it was migrated
for (const monitor of monitors) {
@@ -189,9 +179,7 @@ exports.down = async function (knex) {
const oldGameId = gameDig5to4IdMap[newGameId];
if (oldGameId) {
await trx("monitor")
.where("id", monitor.id)
.update({ game: oldGameId });
await trx("monitor").where("id", monitor.id).update({ game: oldGameId });
}
}
});

View File

@@ -11,13 +11,9 @@ https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Template
```js
exports.up = function(knex) {
exports.up = function (knex) {};
};
exports.down = function(knex) {
};
exports.down = function (knex) {};
// exports.config = { transaction: false };
```
@@ -27,18 +23,19 @@ exports.down = function(knex) {
Filename: 2023-06-30-1348-create-user-and-product.js
```js
exports.up = function(knex) {
exports.up = function (knex) {
return knex.schema
.createTable('user', function (table) {
table.increments('id');
table.string('first_name', 255).notNullable();
table.string('last_name', 255).notNullable();
.createTable("user", function (table) {
table.increments("id");
table.string("first_name", 255).notNullable();
table.string("last_name", 255).notNullable();
})
.createTable('product', function (table) {
table.increments('id');
table.decimal('price').notNullable();
table.string('name', 1000).notNullable();
}).then(() => {
.createTable("product", function (table) {
table.increments("id");
table.decimal("price").notNullable();
table.string("name", 1000).notNullable();
})
.then(() => {
knex("products").insert([
{ price: 10, name: "Apple" },
{ price: 20, name: "Orange" },
@@ -46,10 +43,8 @@ exports.up = function(knex) {
});
};
exports.down = function(knex) {
return knex.schema
.dropTable("product")
.dropTable("user");
exports.down = function (knex) {
return knex.schema.dropTable("product").dropTable("user");
};
```

View File

@@ -1,4 +1,4 @@
version: '3.8'
version: "3.8"
services:
uptime-kuma:
@@ -11,4 +11,3 @@ services:
ports:
- "3001:3001" # <Host Port>:<Container Port>
- "3307:3306"

View File

@@ -1,6 +1,8 @@
module.exports = {
apps: [{
apps: [
{
name: "uptime-kuma",
script: "./server/server.js",
}]
},
],
};

View File

@@ -16,27 +16,26 @@ if (!version || !version.includes("-beta.")) {
const exists = tagExists(version);
if (! exists) {
if (!exists) {
// Process package.json
pkg.version = version;
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const resultVersion = childProcess.spawnSync(npm, [ "--no-git-tag-version", "version", version ], { shell: true });
const resultVersion = childProcess.spawnSync(npm, ["--no-git-tag-version", "version", version], { shell: true });
if (resultVersion.error) {
console.error(resultVersion.error);
console.error("error npm version!");
process.exit(1);
}
const resultInstall = childProcess.spawnSync(npm, [ "install" ], { shell: true });
const resultInstall = childProcess.spawnSync(npm, ["install"], { shell: true });
if (resultInstall.error) {
console.error(resultInstall.error);
console.error("error update package-lock!");
process.exit(1);
}
commit(version);
} else {
console.log("version tag exists, please delete the tag or use another tag");
process.exit(1);
@@ -51,7 +50,7 @@ if (! exists) {
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let res = childProcess.spawnSync("git", ["commit", "-m", msg, "-a"]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
@@ -59,7 +58,7 @@ function commit(version) {
throw new Error("commit error");
}
res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
res = childProcess.spawnSync("git", ["push", "origin", "master"]);
console.log(res.stdout.toString().trim());
}
@@ -70,11 +69,11 @@ function commit(version) {
* @throws Version is not valid
*/
function tagExists(version) {
if (! version) {
if (!version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
let res = childProcess.spawnSync("git", ["tag", "-l", version]);
return res.stdout.toString().trim() === version;
}

View File

@@ -14,7 +14,9 @@ if (platform === "linux/arm/v7") {
console.log("Already built in the host, skip.");
process.exit(0);
} else {
console.log("prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build.");
console.log(
"prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build."
);
}
} else {
if (fs.existsSync("./extra/healthcheck-armv7")) {
@@ -24,4 +26,3 @@ if (platform === "linux/arm/v7") {
const output = childProcess.execSync("go build -x -o ./extra/healthcheck ./extra/healthcheck.go").toString("utf8");
console.log(output);

View File

@@ -18,7 +18,7 @@ const github = require("@actions/github");
await client.issues.listLabelsOnIssue({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number
issue_number: issue.number,
})
).data.map(({ name }) => name);
@@ -29,7 +29,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
labels: [ "invalid-format" ]
labels: ["invalid-format"],
});
// Add the issue closing comment
@@ -37,7 +37,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues and use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank Issues do not contain the context necessary for a good discussions.`
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues and use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank Issues do not contain the context necessary for a good discussions.`,
});
// Close the issue
@@ -45,7 +45,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
state: "closed"
state: "closed",
});
} else {
console.log("Pass!");
@@ -53,5 +53,4 @@ const github = require("@actions/github");
} catch (e) {
console.log(e);
}
})();

View File

@@ -1,8 +1,7 @@
require("dotenv").config();
const { NodeSSH } = require("node-ssh");
const readline = require("readline");
const rl = readline.createInterface({ input: process.stdin,
output: process.stdout });
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
(async () => {
@@ -13,7 +12,7 @@ const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
host: process.env.UPTIME_KUMA_DEMO_HOST,
port: process.env.UPTIME_KUMA_DEMO_PORT,
username: process.env.UPTIME_KUMA_DEMO_USERNAME,
privateKeyPath: process.env.UPTIME_KUMA_DEMO_PRIVATE_KEY_PATH
privateKeyPath: process.env.UPTIME_KUMA_DEMO_PRIVATE_KEY_PATH,
});
let cwd = process.env.UPTIME_KUMA_DEMO_CWD;
@@ -48,7 +47,6 @@ const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
cwd,
});
console.log(result.stdout + result.stderr);*/
} catch (e) {
console.log(e);
} finally {

View File

@@ -26,7 +26,6 @@ function download(url) {
console.log("Extracting dist...");
if (fs.existsSync("./dist")) {
if (fs.existsSync("./dist-backup")) {
fs.rmSync("./dist-backup", {
recursive: true,

View File

@@ -1,6 +1,6 @@
// Supports: Deno, Bun, Node.js >= 18 (ts-node)
const pushURL : string = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval : number = 60;
const pushURL: string = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval: number = 60;
const push = async () => {
await fetch(pushURL);

View File

@@ -8,7 +8,7 @@ async function main() {
const branch = process.argv[2];
// Use gh to get current branch's pr id
let currentBranchPRID = execSync("gh pr view --json number --jq \".number\"").toString().trim();
let currentBranchPRID = execSync('gh pr view --json number --jq ".number"').toString().trim();
console.log("Pr ID: ", currentBranchPRID);
// Use gh commend to get pr commits

View File

@@ -8,7 +8,7 @@ const TwoFA = require("../server/2fa");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
output: process.stdout,
});
const main = async () => {
@@ -19,7 +19,7 @@ const main = async () => {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
if (!user) {
throw new Error("user not found, have you installed?");
}
@@ -31,7 +31,6 @@ const main = async () => {
await TwoFA.disable2FA(user.id);
console.log("2FA has been removed successfully.");
}
}
} catch (e) {
console.error("Error: " + e.message);

View File

@@ -21,4 +21,3 @@ const main = async () => {
};
main();

View File

@@ -12,7 +12,7 @@ const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
output: process.stdout,
});
const main = async () => {
@@ -28,7 +28,7 @@ const main = async () => {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (! user) {
if (!user) {
throw new Error("user not found, have you installed?");
}
@@ -41,7 +41,10 @@ const main = async () => {
// When called with "--new-password" argument for unattended modification (e.g. npm run reset-password -- --new_password=secret)
if ("new-password" in args) {
console.log("Using password from argument");
console.warn("\x1b[31m%s\x1b[0m", "Warning: the password might be stored, in plain text, in your shell's history");
console.warn(
"\x1b[31m%s\x1b[0m",
"Warning: the password might be stored, in plain text, in your shell's history"
);
password = confirmPassword = args["new-password"] + "";
if (passwordStrength(password).value === "Too weak") {
throw new Error("Password is too weak, please use a stronger password.");
@@ -71,7 +74,6 @@ const main = async () => {
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
@@ -112,10 +114,13 @@ function disconnectAllSocketClients(username, password) {
timeout: 5000,
});
socket.on("connect", () => {
socket.emit("login", {
socket.emit(
"login",
{
username,
password,
}, (res) => {
},
(res) => {
if (res.ok) {
console.log("Logged in.");
socket.emit("disconnectOtherSocketClients");
@@ -124,7 +129,8 @@ function disconnectAllSocketClients(username, password) {
console.warn("Please restart the server to disconnect all sessions.");
}
socket.close();
});
}
);
});
socket.on("connect_error", function () {

View File

@@ -7,7 +7,7 @@ const dns2 = require("dns2");
const { Packet } = dns2;
const server = dns2.createServer({
udp: true
udp: true,
});
server.on("request", (request, send, rinfo) => {
@@ -17,14 +17,13 @@ server.on("request", (request, send, rinfo) => {
const response = Packet.createResponseFromRequest(request);
if (question.name === "existing.com") {
if (question.type === Packet.TYPE.A) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "1.2.3.4"
address: "1.2.3.4",
});
} else if (question.type === Packet.TYPE.AAAA) {
response.answers.push({
@@ -49,7 +48,7 @@ server.on("request", (request, send, rinfo) => {
class: question.class,
ttl: 300,
exchange: "mx1.existing.com",
priority: 5
priority: 5,
});
} else if (question.type === Packet.TYPE.NS) {
response.answers.push({
@@ -103,7 +102,6 @@ server.on("request", (request, send, rinfo) => {
value: "ca.existing.com",
});
}
}
if (question.name === "4.3.2.1.in-addr.arpa") {
@@ -132,7 +130,7 @@ server.on("close", () => {
});
server.listen({
udp: 5300
udp: 5300,
});
/**

View File

@@ -41,17 +41,19 @@ server1.aedes.on("subscribe", (subscriptions, client) => {
for (let s of subscriptions) {
if (s.topic === "test") {
server1.aedes.publish({
server1.aedes.publish(
{
topic: "test",
payload: Buffer.from("ok"),
}, (error) => {
},
(error) => {
if (error) {
log.error("mqtt_server", error);
}
});
}
);
}
}
});
server1.start();

View File

@@ -10,7 +10,7 @@ let lines = file.split("\n");
lines = lines.filter((line) => line !== "");
// Remove duplicates
lines = [ ...new Set(lines) ];
lines = [...new Set(lines)];
// Remove @weblate and @UptimeKumaBot
lines = lines.filter((line) => line !== "@weblate" && line !== "@UptimeKumaBot" && line !== "@louislam");

View File

@@ -54,13 +54,13 @@ async function updateLanguage(langCode, baseLangCode) {
} else {
console.log("Empty file");
obj = {
languageName: "<Your Language name in your language (not in English)>"
languageName: "<Your Language name in your language (not in English)>",
};
}
// En first
for (const key in en) {
if (! obj[key]) {
if (!obj[key]) {
obj[key] = en[key];
}
}
@@ -68,13 +68,15 @@ async function updateLanguage(langCode, baseLangCode) {
if (baseLang !== en) {
// Base second
for (const key in baseLang) {
if (! obj[key]) {
if (!obj[key]) {
obj[key] = key;
}
}
}
const code = "export default " + util.inspect(obj, {
const code =
"export default " +
util.inspect(obj, {
depth: null,
});

View File

@@ -9,15 +9,14 @@ const newVersion = process.env.RELEASE_VERSION;
console.log("New Version: " + newVersion);
if (! newVersion) {
if (!newVersion) {
console.error("invalid version");
process.exit(1);
}
const exists = tagExists(newVersion);
if (! exists) {
if (!exists) {
// Process package.json
pkg.version = newVersion;
@@ -27,20 +26,19 @@ if (! exists) {
// Also update package-lock.json
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const resultVersion = childProcess.spawnSync(npm, [ "--no-git-tag-version", "version", newVersion ], { shell: true });
const resultVersion = childProcess.spawnSync(npm, ["--no-git-tag-version", "version", newVersion], { shell: true });
if (resultVersion.error) {
console.error(resultVersion.error);
console.error("error npm version!");
process.exit(1);
}
const resultInstall = childProcess.spawnSync(npm, [ "install" ], { shell: true });
const resultInstall = childProcess.spawnSync(npm, ["install"], { shell: true });
if (resultInstall.error) {
console.error(resultInstall.error);
console.error("error update package-lock!");
process.exit(1);
}
commit(newVersion);
} else {
console.log("version exists");
}
@@ -54,7 +52,7 @@ if (! exists) {
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let res = childProcess.spawnSync("git", ["commit", "-m", msg, "-a"]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
@@ -70,11 +68,11 @@ function commit(version) {
* @throws Version is not valid
*/
function tagExists(version) {
if (! version) {
if (!version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
let res = childProcess.spawnSync("git", ["tag", "-l", version]);
return res.stdout.toString().trim() === version;
}

View File

@@ -21,23 +21,23 @@ function updateWiki(newVersion) {
safeDelete(wikiDir);
childProcess.spawnSync("git", [ "clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir ]);
childProcess.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
let content = fs.readFileSync(howToUpdateFilename).toString();
// Replace the version: https://regex101.com/r/hmj2Bc/1
content = content.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
childProcess.spawnSync("git", [ "add", "-A" ], {
childProcess.spawnSync("git", ["add", "-A"], {
cwd: wikiDir,
});
childProcess.spawnSync("git", [ "commit", "-m", `Update to ${newVersion}` ], {
childProcess.spawnSync("git", ["commit", "-m", `Update to ${newVersion}`], {
cwd: wikiDir,
});
console.log("Pushing to Github");
childProcess.spawnSync("git", [ "push" ], {
childProcess.spawnSync("git", ["push"], {
cwd: wikiDir,
});

View File

@@ -9,16 +9,16 @@ if (!platform) {
const supportedPlatforms = [
{
name: "linux/amd64",
bin: "./build/uptime-kuma-push-amd64"
bin: "./build/uptime-kuma-push-amd64",
},
{
name: "linux/arm64",
bin: "./build/uptime-kuma-push-arm64"
bin: "./build/uptime-kuma-push-arm64",
},
{
name: "linux/arm/v7",
bin: "./build/uptime-kuma-push-armv7"
}
bin: "./build/uptime-kuma-push-armv7",
},
];
let platformObj = null;
@@ -45,4 +45,3 @@ if (platformObj) {
console.error("Unsupported platform: " + platform);
process.exit(1);
}

View File

@@ -16,7 +16,7 @@
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"fmt": "prettier --write \"**/*.{js,ts,vue,css,scss,json,md,yml,yaml}\" --ignore-path .gitignore",
"fmt": "prettier --write \"**/*.{js,ts,vue,css,scss,json,md,yml,yaml}\"",
"lint:prod": "npm run lint:js-prod && npm run lint:style",
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",

View File

@@ -1,18 +1,14 @@
const { R } = require("redbean-node");
class TwoFA {
/**
* Disable 2FA for specified user
* @param {number} userID ID of user to disable
* @returns {Promise<void>}
*/
static async disable2FA(userID) {
return await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [
userID,
]);
return await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [userID]);
}
}
module.exports = TwoFA;

View File

@@ -16,7 +16,10 @@ function getAnalyticsScript(statusPage) {
case "umami":
return umamiAnalytics.getUmamiAnalyticsScript(statusPage.analyticsScriptUrl, statusPage.analyticsId);
case "plausible":
return plausibleAnalytics.getPlausibleAnalyticsScript(statusPage.analyticsScriptUrl, statusPage.analyticsId);
return plausibleAnalytics.getPlausibleAnalyticsScript(
statusPage.analyticsScriptUrl,
statusPage.analyticsId
);
case "matomo":
return matomoAnalytics.getMatomoAnalyticsScript(statusPage.analyticsScriptUrl, statusPage.analyticsId);
default:
@@ -44,5 +47,5 @@ function isValidAnalyticsConfig(statusPage) {
module.exports = {
getAnalyticsScript,
isValidAnalyticsConfig
isValidAnalyticsConfig,
};

View File

@@ -32,5 +32,5 @@ function getPlausibleAnalyticsScript(scriptUrl, domainsToMonitor) {
}
module.exports = {
getPlausibleAnalyticsScript
getPlausibleAnalyticsScript,
};

View File

@@ -18,9 +18,7 @@ exports.login = async function (username, password) {
return null;
}
let user = await R.findOne("user", "TRIM(username) = ? AND active = 1 ", [
username.trim(),
]);
let user = await R.findOne("user", "TRIM(username) = ? AND active = 1 ", [username.trim()]);
if (user && passwordHash.verify(password, user.password)) {
// Upgrade the hash to bcrypt
@@ -50,7 +48,7 @@ async function verifyAPIKey(key) {
let index = key.substring(2, key.indexOf("_"));
let clear = key.substring(key.indexOf("_") + 1, key.length);
let hash = await R.findOne("api_key", " id=? ", [ index ]);
let hash = await R.findOne("api_key", " id=? ", [index]);
if (hash === null) {
return false;
@@ -156,7 +154,7 @@ exports.basicAuth = async function (req, res, next) {
* @returns {Promise<void>}
*/
exports.apiAuth = async function (req, res, next) {
if (!await Settings.get("disableAuth")) {
if (!(await Settings.get("disableAuth"))) {
let usingAPIKeys = await Settings.get("apiKeysEnabled");
let middleware;
if (usingAPIKeys) {

View File

@@ -14,7 +14,7 @@ let interval;
exports.startInterval = () => {
let check = async () => {
if (await setting("checkUpdate") === false) {
if ((await setting("checkUpdate")) === false) {
return;
}
@@ -40,11 +40,9 @@ exports.startInterval = () => {
if (res.data.slow) {
exports.latestVersion = res.data.slow;
}
} catch (_) {
log.info("update-checker", "Failed to check for new versions");
}
};
check();

View File

@@ -19,14 +19,12 @@ async function sendNotificationList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("notification", " user_id = ? ", [
socket.userID,
]);
let list = await R.find("notification", " user_id = ? ", [socket.userID]);
for (let bean of list) {
let notificationObject = bean.export();
notificationObject.isDefault = (notificationObject.isDefault === 1);
notificationObject.active = (notificationObject.active === 1);
notificationObject.isDefault = notificationObject.isDefault === 1;
notificationObject.active = notificationObject.active === 1;
result.push(notificationObject);
}
@@ -46,14 +44,15 @@ async function sendNotificationList(socket) {
* @returns {Promise<void>}
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
let list = await R.getAll(`
let list = await R.getAll(
`
SELECT * FROM heartbeat
WHERE monitor_id = ?
ORDER BY time DESC
LIMIT 100
`, [
monitorID,
]);
`,
[monitorID]
);
let result = list.reverse();
@@ -75,14 +74,16 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.find("heartbeat", `
let list = await R.find(
"heartbeat",
`
monitor_id = ?
AND important = 1
ORDER BY time DESC
LIMIT 500
`, [
monitorID,
]);
`,
[monitorID]
);
timeLogger.print(`[Monitor: ${monitorID}] sendImportantHeartbeatList`);
@@ -91,7 +92,6 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
} else {
socket.emit("importantHeartbeatList", monitorID, list, overwrite);
}
}
/**
@@ -102,8 +102,11 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
async function sendProxyList(socket) {
const timeLogger = new TimeLogger();
const list = await R.find("proxy", " user_id = ? ", [ socket.userID ]);
io.to(socket.userID).emit("proxyList", list.map(bean => bean.export()));
const list = await R.find("proxy", " user_id = ? ", [socket.userID]);
io.to(socket.userID).emit(
"proxyList",
list.map((bean) => bean.export())
);
timeLogger.print("Send Proxy List");
@@ -119,11 +122,7 @@ async function sendAPIKeyList(socket) {
const timeLogger = new TimeLogger();
let result = [];
const list = await R.find(
"api_key",
"user_id=?",
[ socket.userID ],
);
const list = await R.find("api_key", "user_id=?", [socket.userID]);
for (let bean of list) {
result.push(bean.toPublicJSON());
@@ -150,7 +149,7 @@ async function sendInfo(socket, hideVersion = false) {
if (!hideVersion) {
info.version = checkVersion.version;
info.latestVersion = checkVersion.latestVersion;
info.isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
info.isContainer = process.env.UPTIME_KUMA_IS_CONTAINER === "1";
info.dbType = Database.dbConfig.type;
info.runtime = {
platform: process.platform, // linux or win32
@@ -170,9 +169,7 @@ async function sendDockerHostList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("docker_host", " user_id = ? ", [
socket.userID,
]);
let list = await R.find("docker_host", " user_id = ? ", [socket.userID]);
for (let bean of list) {
result.push(bean.toJSON());
@@ -194,9 +191,7 @@ async function sendRemoteBrowserList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("remote_browser", " user_id = ? ", [
socket.userID,
]);
let list = await R.find("remote_browser", " user_id = ? ", [socket.userID]);
for (let bean of list) {
result.push(bean.toJSON());
@@ -215,13 +210,15 @@ async function sendRemoteBrowserList(socket) {
* @returns {Promise<void>}
*/
async function sendMonitorTypeList(socket) {
const result = Object.entries(UptimeKumaServer.monitorTypeList).map(([ key, type ]) => {
return [ key, {
const result = Object.entries(UptimeKumaServer.monitorTypeList).map(([key, type]) => {
return [
key,
{
supportsConditions: type.supportsConditions,
conditionVariables: type.conditionVariables.map(v => {
conditionVariables: type.conditionVariables.map((v) => {
return {
id: v.id,
operators: v.operators.map(o => {
operators: v.operators.map((o) => {
return {
id: o.id,
caption: o.caption,
@@ -229,7 +226,8 @@ async function sendMonitorTypeList(socket) {
}),
};
}),
}];
},
];
});
io.to(socket.userID).emit("monitorTypeList", Object.fromEntries(result));

View File

@@ -1,7 +1,7 @@
const isFreeBSD = /^freebsd/.test(process.platform);
// Interop with browser
const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
const args = typeof process !== "undefined" ? require("args-parser")(process.argv) : {};
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
@@ -9,13 +9,17 @@ const args = (typeof process !== "undefined") ? require("args-parser")(process.a
let hostEnv = isFreeBSD ? null : process.env.HOST;
const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
const port = [args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001]
.map((portValue) => parseInt(portValue))
.find((portValue) => !isNaN(portValue));
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
const sslKeyPassphrase =
args["ssl-key-passphrase"] ||
process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE ||
process.env.SSL_KEY_PASSPHRASE ||
undefined;
const isSSL = sslKey && sslCert;

View File

@@ -18,7 +18,6 @@ const SqlString = require("sqlstring");
* Database & App Data Folder
*/
class Database {
/**
* Bootstrap database for SQLite
* @type {string}
@@ -89,7 +88,7 @@ class Database {
"patch-added-mqtt-monitor.sql": true,
"patch-add-clickable-status-page-link.sql": true,
"patch-add-sqlserver-monitor.sql": true,
"patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] },
"patch-add-other-auth.sql": { parents: ["patch-monitor-basic-auth.sql"] },
"patch-grpc-monitor.sql": true,
"patch-add-radius-monitor.sql": true,
"patch-monitor-add-resend-interval.sql": true,
@@ -138,24 +137,24 @@ class Database {
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.sqlitePath = path.join(Database.dataDir, "kuma.db");
if (! fs.existsSync(Database.dataDir)) {
if (!fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = path.join(Database.dataDir, "upload/");
if (! fs.existsSync(Database.uploadDir)) {
if (!fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
// Create screenshot dir
Database.screenshotDir = path.join(Database.dataDir, "screenshots/");
if (! fs.existsSync(Database.screenshotDir)) {
if (!fs.existsSync(Database.screenshotDir)) {
fs.mkdirSync(Database.screenshotDir, { recursive: true });
}
Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
if (! fs.existsSync(Database.dockerTLSDir)) {
if (!fs.existsSync(Database.dockerTLSDir)) {
fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
}
@@ -228,13 +227,22 @@ class Database {
if (!process.env.UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS) {
parsedMaxPoolConnections = 10;
} else if (Number.isNaN(parsedMaxPoolConnections)) {
log.warn("db", "Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was invalid.");
log.warn(
"db",
"Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was invalid."
);
parsedMaxPoolConnections = 10;
} else if (parsedMaxPoolConnections < 1) {
log.warn("db", "Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was less than 1.");
log.warn(
"db",
"Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was less than 1."
);
parsedMaxPoolConnections = 10;
} else if (parsedMaxPoolConnections > 100) {
log.warn("db", "Max database connections capped to 100 because Mysql/Mariadb connections are heavy. consider using a proxy like ProxySQL or MaxScale.");
log.warn(
"db",
"Max database connections capped to 100 because Mysql/Mariadb connections are heavy. consider using a proxy like ProxySQL or MaxScale."
);
parsedMaxPoolConnections = 100;
}
@@ -247,8 +255,7 @@ class Database {
log.info("db", `Database Type: ${dbConfig.type}`);
if (dbConfig.type === "sqlite") {
if (! fs.existsSync(Database.sqlitePath)) {
if (!fs.existsSync(Database.sqlitePath)) {
log.info("server", "Copying Database");
fs.copyFileSync(Database.templatePath, Database.sqlitePath);
}
@@ -269,7 +276,7 @@ class Database {
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
}
},
};
} else if (dbConfig.type === "mariadb") {
const connection = await mysql.createConnection({
@@ -387,7 +394,7 @@ class Database {
log.debug("db", "SQLite config:");
log.debug("db", await R.getAll("PRAGMA journal_mode"));
log.debug("db", await R.getAll("PRAGMA cache_size"));
log.debug("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
log.debug("db", "SQLite Version: " + (await R.getCell("SELECT sqlite_version()")));
}
}
@@ -439,7 +446,6 @@ class Database {
}
await this.migrateAggregateTable(port, hostname);
} catch (e) {
// Allow missing patch files for downgrade or testing pr.
if (e.message.includes("the following files are missing:")) {
@@ -456,9 +462,7 @@ class Database {
* TODO
* @returns {Promise<void>}
*/
static async rollbackLatestPatch() {
}
static async rollbackLatestPatch() {}
/**
* Patch the database for SQLite
@@ -468,7 +472,7 @@ class Database {
static async patchSqlite() {
let version = parseInt(await setting("database_version"));
if (! version) {
if (!version) {
version = 0;
}
@@ -498,7 +502,10 @@ class Database {
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
log.error(
"db",
"Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues"
);
process.exit(1);
}
@@ -519,7 +526,7 @@ class Database {
log.debug("db", "Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
if (! databasePatchedFiles) {
if (!databasePatchedFiles) {
databasePatchedFiles = {};
}
@@ -534,13 +541,15 @@ class Database {
if (this.patched) {
log.info("db", "Database Patched Successfully");
}
} catch (ex) {
await Database.close();
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error("db", "Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
log.error(
"db",
"Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues"
);
process.exit(1);
}
@@ -554,7 +563,6 @@ class Database {
* @returns {Promise<void>}
*/
static async migrateNewStatusPage() {
// Fix 1.13.0 empty slug bug
await R.exec("UPDATE status_page SET slug = 'empty-slug-recover' WHERE TRIM(slug) = ''");
@@ -576,9 +584,9 @@ class Database {
statusPage.description = await setting("description");
statusPage.icon = await setting("icon");
statusPage.theme = await setting("statusPageTheme");
statusPage.published = !!await setting("statusPagePublished");
statusPage.search_engine_index = !!await setting("searchEngineIndex");
statusPage.show_tags = !!await setting("statusPageTags");
statusPage.published = !!(await setting("statusPagePublished"));
statusPage.search_engine_index = !!(await setting("searchEngineIndex"));
statusPage.show_tags = !!(await setting("statusPageTags"));
statusPage.password = null;
if (!statusPage.title) {
@@ -595,13 +603,9 @@ class Database {
let id = await R.store(statusPage);
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [id]);
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [id]);
await R.exec("DELETE FROM setting WHERE type = 'statusPage'");
@@ -614,7 +618,6 @@ class Database {
console.log("Migrating Status Page - Done");
}
}
/**
@@ -628,13 +631,13 @@ class Database {
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
let value = this.patchList[sqlFilename];
if (! value) {
if (!value) {
log.info("db", sqlFilename + " skip");
return;
}
// Check if patched
if (! databasePatchedFiles[sqlFilename]) {
if (!databasePatchedFiles[sqlFilename]) {
log.info("db", sqlFilename + " is not patched");
if (value.parents) {
@@ -649,7 +652,6 @@ class Database {
await this.importSQLFile("./db/old_migrations/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
log.info("db", sqlFilename + " was patched successfully");
} else {
log.debug("db", sqlFilename + " is already patched, skip");
}
@@ -669,14 +671,15 @@ class Database {
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return ! line.startsWith("--");
return !line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty line
text = lines.join("\n");
let statements = text.split(";")
let statements = text
.split(";")
.map((statement) => {
return statement.trim();
})
@@ -773,7 +776,10 @@ class Database {
// Add a setting for 2.0.0-dev users to skip this migration
if (process.env.SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE === "1") {
log.warn("db", "SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE is set to 1, skipping aggregate table migration forever (for 2.0.0-dev users)");
log.warn(
"db",
"SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE is set to 1, skipping aggregate table migration forever (for 2.0.0-dev users)"
);
await Settings.set("migrateAggregateTableState", "migrated");
}
@@ -813,11 +819,14 @@ class Database {
`);
// Stop if stat_* tables are not empty
for (let table of [ "stat_minutely", "stat_hourly", "stat_daily" ]) {
for (let table of ["stat_minutely", "stat_hourly", "stat_daily"]) {
let countResult = await R.getRow(`SELECT COUNT(*) AS count FROM ${table}`);
let count = countResult.count;
if (count > 0) {
log.warn("db", `Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`);
log.warn(
"db",
`Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`
);
await migrationServer?.stop();
return;
}
@@ -826,31 +835,35 @@ class Database {
await Settings.set("migrateAggregateTableState", "migrating");
let progressPercent = 0;
for (const [ i, monitor ] of monitors.entries()) {
for (const [i, monitor] of monitors.entries()) {
// Get a list of unique dates from the heartbeat table, using raw sql
let dates = await R.getAll(`
let dates = await R.getAll(
`
SELECT DISTINCT DATE(time) AS date
FROM heartbeat
WHERE monitor_id = ?
ORDER BY date ASC
`, [
monitor.monitor_id
]);
`,
[monitor.monitor_id]
);
for (const [ dateIndex, date ] of dates.entries()) {
for (const [dateIndex, date] of dates.entries()) {
// New Uptime Calculator
let calculator = new UptimeCalculator();
calculator.monitorID = monitor.monitor_id;
calculator.setMigrationMode(true);
// Get all the heartbeats for this monitor and date
let heartbeats = await R.getAll(`
let heartbeats = await R.getAll(
`
SELECT status, ping, time
FROM heartbeat
WHERE monitor_id = ?
AND DATE(time) = ?
ORDER BY time ASC
`, [ monitor.monitor_id, date.date ]);
`,
[monitor.monitor_id, date.date]
);
if (heartbeats.length > 0) {
msg = `[DON'T STOP] Migrating monitor ${monitor.monitor_id}s' (${i + 1} of ${monitors.length} total) data - ${date.date} - total migration progress ${progressPercent.toFixed(2)}%`;
@@ -863,7 +876,7 @@ class Database {
}
// Calculate progress: (current_monitor_index + relative_date_progress) / total_monitors
progressPercent = (i + (dateIndex + 1) / dates.length) / monitors.length * 100;
progressPercent = ((i + (dateIndex + 1) / dates.length) / monitors.length) * 100;
// Lazy to fix the floating point issue, it is acceptable since it is just a progress bar
if (progressPercent > 100) {
@@ -900,7 +913,8 @@ class Database {
if (detailedLog) {
log.info("db", "Deleting non-important heartbeats for monitor " + monitor.id);
}
await R.exec(`
await R.exec(
`
DELETE FROM heartbeat
WHERE monitor_id = ?
AND important = 0
@@ -914,15 +928,11 @@ class Database {
LIMIT ?
) AS limited_ids
)
`, [
monitor.id,
-24,
monitor.id,
100,
]);
`,
[monitor.id, -24, monitor.id, 100]
);
}
}
}
module.exports = Database;

View File

@@ -7,7 +7,6 @@ const Database = require("./database");
const { axiosAbortSignal, fsExists } = require("./util-server");
class DockerHost {
static CertificateFileNameCA = "ca.pem";
static CertificateFileNameCert = "cert.pem";
static CertificateFileNameKey = "key.pem";
@@ -23,12 +22,11 @@ class DockerHost {
let bean;
if (dockerHostID) {
bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [ dockerHostID, userID ]);
bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [dockerHostID, userID]);
if (!bean) {
throw new Error("docker host not found");
}
} else {
bean = R.dispense("docker_host");
}
@@ -50,14 +48,14 @@ class DockerHost {
* @returns {Promise<void>}
*/
static async delete(dockerHostID, userID) {
let bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [ dockerHostID, userID ]);
let bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [dockerHostID, userID]);
if (!bean) {
throw new Error("docker host not found");
}
// Delete removed proxy from monitors if exists
await R.exec("UPDATE monitor SET docker_host = null WHERE docker_host = ?", [ dockerHostID ]);
await R.exec("UPDATE monitor SET docker_host = null WHERE docker_host = ?", [dockerHostID]);
await R.trash(bean);
}
@@ -72,7 +70,7 @@ class DockerHost {
url: "/containers/json?all=true",
timeout: 5000,
headers: {
"Accept": "*/*",
Accept: "*/*",
},
signal: axiosAbortSignal(6000),
};
@@ -81,26 +79,24 @@ class DockerHost {
options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") {
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
options.httpsAgent = new https.Agent(await DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
options.httpsAgent = new https.Agent(
await DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL)
);
}
try {
let res = await axios.request(options);
if (Array.isArray(res.data)) {
if (res.data.length > 1) {
if ("ImageID" in res.data[0]) {
return res.data.length;
} else {
throw new Error("Invalid Docker response, is it Docker really a daemon?");
}
} else {
return res.data.length;
}
} else {
throw new Error("Invalid Docker response, is it Docker really a daemon?");
}
@@ -146,30 +142,35 @@ class DockerHost {
static async getHttpsAgentOptions(dockerType, url) {
let baseOptions = {
maxCachedSessions: 0,
rejectUnauthorized: true
rejectUnauthorized: true,
};
let certOptions = {};
let dirName = (new URL(url)).hostname;
let dirName = new URL(url).hostname;
let caPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCA);
let certPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCert);
let keyPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameKey);
if (dockerType === "tcp" && await fsExists(caPath) && await fsExists(certPath) && await fsExists(keyPath)) {
if (
dockerType === "tcp" &&
(await fsExists(caPath)) &&
(await fsExists(certPath)) &&
(await fsExists(keyPath))
) {
let ca = await fsAsync.readFile(caPath);
let key = await fsAsync.readFile(keyPath);
let cert = await fsAsync.readFile(certPath);
certOptions = {
ca,
key,
cert
cert,
};
}
return {
...baseOptions,
...certOptions
...certOptions,
};
}
}

View File

@@ -7,7 +7,6 @@ const mysql = require("mysql2");
* It is only used inside the docker container
*/
class EmbeddedMariaDB {
static instance = null;
exec = "mariadbd";
@@ -59,7 +58,9 @@ class EmbeddedMariaDB {
// Check if the current user is "node" or "root"
this.username = require("os").userInfo().username;
if (this.username !== "node" && this.username !== "root") {
throw new Error("Embedded Mariadb supports only 'node' or 'root' user, but the current user is: " + this.username);
throw new Error(
"Embedded Mariadb supports only 'node' or 'root' user, but the current user is: " + this.username
);
}
this.initDB();
@@ -211,7 +212,6 @@ class EmbeddedMariaDB {
log.info("mariadb", "Embedded MariaDB is ready for connections");
this.started = true;
}
}
module.exports = {

View File

@@ -6,7 +6,6 @@ let fs = require("fs");
const { log } = require("../src/util");
let ImageDataURI = (() => {
/**
* Decode the data:image/ URI
* @param {string} dataURI data:image/ URI to decode
@@ -17,7 +16,7 @@ let ImageDataURI = (() => {
*/
function decode(dataURI) {
if (!/data:image\//.test(dataURI)) {
log.error("image-data-uri", "It seems that it is not an Image Data URI. Couldn't match \"data:image/\"");
log.error("image-data-uri", 'It seems that it is not an Image Data URI. Couldn\'t match "data:image/"');
return null;
}
@@ -25,7 +24,7 @@ let ImageDataURI = (() => {
return {
imageType: regExMatches[1],
dataBase64: regExMatches[2],
dataBuffer: new Buffer(regExMatches[2], "base64")
dataBuffer: new Buffer(regExMatches[2], "base64"),
};
}
@@ -42,8 +41,8 @@ let ImageDataURI = (() => {
return null;
}
mediaType = (/\//.test(mediaType)) ? mediaType : "image/" + mediaType;
let dataBase64 = (Buffer.isBuffer(data)) ? data.toString("base64") : new Buffer(data).toString("base64");
mediaType = /\//.test(mediaType) ? mediaType : "image/" + mediaType;
let dataBase64 = Buffer.isBuffer(data) ? data.toString("base64") : new Buffer(data).toString("base64");
let dataImgBase64 = "data:" + mediaType + ";base64," + dataBase64;
return dataImgBase64;
@@ -60,7 +59,7 @@ let ImageDataURI = (() => {
return new Promise((resolve, reject) => {
let imageDecoded = decode(dataURI);
fs.writeFile(filePath, imageDecoded.dataBuffer, err => {
fs.writeFile(filePath, imageDecoded.dataBuffer, (err) => {
if (err) {
return reject("ImageDataURI :: Error :: " + JSON.stringify(err, null, 4));
}

View File

@@ -15,7 +15,7 @@ const jobs = [
interval: "*/5 * * * *",
jobFunc: incrementalVacuum,
croner: null,
}
},
];
/**
@@ -32,11 +32,10 @@ const initBackgroundJobs = async function () {
name: job.name,
timezone,
},
job.jobFunc,
job.jobFunc
);
job.croner = cornerJob;
}
};
/**
@@ -54,5 +53,5 @@ const stopBackgroundJobs = function () {
module.exports = {
initBackgroundJobs,
stopBackgroundJobs
stopBackgroundJobs,
};

View File

@@ -31,23 +31,22 @@ const clearOldData = async () => {
}
if (parsedPeriod < 1) {
log.info("clearOldData", `Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`);
log.info(
"clearOldData",
`Data deletion has been disabled as period is less than 1. Period is ${parsedPeriod} days.`
);
} else {
log.debug("clearOldData", `Clearing Data older than ${parsedPeriod} days...`);
const sqlHourOffset = Database.sqlHourOffset();
try {
// Heartbeat
await R.exec("DELETE FROM heartbeat WHERE time < " + sqlHourOffset, [
parsedPeriod * -24,
]);
await R.exec("DELETE FROM heartbeat WHERE time < " + sqlHourOffset, [parsedPeriod * -24]);
let timestamp = dayjs().subtract(parsedPeriod, "day").utc().startOf("day").unix();
// stat_daily
await R.exec("DELETE FROM stat_daily WHERE timestamp < ? ", [
timestamp,
]);
await R.exec("DELETE FROM stat_daily WHERE timestamp < ? ", [timestamp]);
if (Database.dbConfig.type === "sqlite") {
await R.exec("PRAGMA optimize;");

View File

@@ -7,10 +7,12 @@ const { Notification } = require("../notification");
const { default: NodeFetchCache, MemoryCache } = require("node-fetch-cache");
const TranslatableError = require("../translatable-error");
const cachedFetch = process.env.NODE_ENV ? NodeFetchCache.create({
const cachedFetch = process.env.NODE_ENV
? NodeFetchCache.create({
// cache for 8h
cache: new MemoryCache({ ttl: 1000 * 60 * 60 * 8 })
}) : fetch;
cache: new MemoryCache({ ttl: 1000 * 60 * 60 * 8 }),
})
: fetch;
/**
* Find the RDAP server for a given TLD
@@ -28,7 +30,7 @@ async function getRdapServer(tld) {
}
for (const service of rdapList["services"]) {
const [ tlds, urls ] = service;
const [tlds, urls] = service;
if (tlds.includes(tld)) {
return urls[0];
}
@@ -108,7 +110,7 @@ class DomainExpiry extends BeanModel {
* @returns {Promise<DomainExpiry>} Domain bean
*/
static async findByName(domain) {
return R.findOne("domain_expiry", "domain = ?", [ domain ]);
return R.findOne("domain_expiry", "domain = ?", [domain]);
}
/**
@@ -159,7 +161,7 @@ class DomainExpiry extends BeanModel {
if (!tld.domain) {
throw new TranslatableError("domain_expiry_unsupported_invalid_domain", { hostname: tld.hostname });
}
if ( !tld.publicSuffix) {
if (!tld.publicSuffix) {
throw new TranslatableError("domain_expiry_unsupported_public_suffix", { publicSuffix: tld.publicSuffix });
}
if (tld.isIp) {
@@ -176,9 +178,14 @@ class DomainExpiry extends BeanModel {
// Only warn when the monitor actually has domain expiry notifications enabled.
// The edit monitor page calls this method frequently while the user is typing.
if (Boolean(monitor.domainExpiryNotification)) {
log.warn("domain_expiry", `Domain expiry unsupported for '.${tld.publicSuffix}' because its RDAP endpoint is not listed in the IANA database.`);
log.warn(
"domain_expiry",
`Domain expiry unsupported for '.${tld.publicSuffix}' because its RDAP endpoint is not listed in the IANA database.`
);
}
throw new TranslatableError("domain_expiry_unsupported_unsupported_tld_no_rdap_endpoint", { publicSuffix: tld.publicSuffix });
throw new TranslatableError("domain_expiry_unsupported_unsupported_tld_no_rdap_endpoint", {
publicSuffix: tld.publicSuffix,
});
}
return {
@@ -258,7 +265,10 @@ class DomainExpiry extends BeanModel {
}
// sanity check if expiry date is valid before calculating days remaining. Should not happen and likely indicates a bug in the code.
if (!domain.expiry || isNaN(new Date(domain.expiry).getTime())) {
log.warn("domain_expiry", `No valid expiry date passed to sendNotifications for ${domainName} (expiry: ${domain.expiry}), skipping notification`);
log.warn(
"domain_expiry",
`No valid expiry date passed to sendNotifications for ${domainName} (expiry: ${domain.expiry}), skipping notification`
);
return;
}
@@ -269,8 +279,8 @@ class DomainExpiry extends BeanModel {
let notifyDays = await setting("domainExpiryNotifyDays");
if (notifyDays == null || !Array.isArray(notifyDays)) {
// Reset Default
await setSetting("domainExpiryNotifyDays", [ 7, 14, 21 ], "general");
notifyDays = [ 7, 14, 21 ];
await setSetting("domainExpiryNotifyDays", [7, 14, 21], "general");
notifyDays = [7, 14, 21];
}
if (Array.isArray(notifyDays)) {
// Asc sort to avoid sending multiple notifications if daysRemaining is below multiple targetDays

View File

@@ -2,7 +2,6 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
const { R } = require("redbean-node");
class Group extends BeanModel {
/**
* Return an object that ready to parse to JSON for public Only show
* necessary data to public
@@ -32,14 +31,18 @@ class Group extends BeanModel {
* @returns {Promise<Bean[]>} List of monitors
*/
async getMonitorList() {
return R.convertToBeans("monitor", await R.getAll(`
return R.convertToBeans(
"monitor",
await R.getAll(
`
SELECT monitor.*, monitor_group.send_url, monitor_group.custom_url FROM monitor, monitor_group
WHERE monitor.id = monitor_group.monitor_id
AND group_id = ?
ORDER BY monitor_group.weight
`, [
this.id,
]));
`,
[this.id]
)
);
}
}

View File

@@ -8,7 +8,6 @@ const { BeanModel } = require("redbean-node/dist/bean-model");
* 3 = MAINTENANCE
*/
class Heartbeat extends BeanModel {
/**
* Return an object that ready to parse to JSON for public
* Only show necessary data to public
@@ -39,7 +38,6 @@ class Heartbeat extends BeanModel {
retries: this._retries,
};
}
}
module.exports = Heartbeat;

View File

@@ -1,7 +1,6 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Incident extends BeanModel {
/**
* Return an object that ready to parse to JSON for public
* Only show necessary data to public

View File

@@ -7,14 +7,12 @@ const { UptimeKumaServer } = require("../uptime-kuma-server");
const apicache = require("../modules/apicache");
class Maintenance extends BeanModel {
/**
* Return an object that ready to parse to JSON for public
* Only show necessary data to public
* @returns {Promise<object>} Object ready to parse
*/
async toPublicJSON() {
let dateRange = [];
if (this.start_date) {
dateRange.push(this.start_date);
@@ -41,8 +39,8 @@ class Maintenance extends BeanModel {
active: !!this.active,
dateRange: dateRange,
timeRange: timeRange,
weekdays: (this.weekdays) ? JSON.parse(this.weekdays) : [],
daysOfMonth: (this.days_of_month) ? JSON.parse(this.days_of_month) : [],
weekdays: this.weekdays ? JSON.parse(this.weekdays) : [],
daysOfMonth: this.days_of_month ? JSON.parse(this.days_of_month) : [],
timeslotList: [],
cron: this.cron,
duration: this.duration,
@@ -202,7 +200,7 @@ class Maintenance extends BeanModel {
* @returns {void}
*/
static validateCron(cron) {
let job = new Cron(cron, () => { });
let job = new Cron(cron, () => {});
job.stop();
}
@@ -270,7 +268,7 @@ class Maintenance extends BeanModel {
if (this.strategy === "recurring-interval") {
// For recurring-interval, Croner needs to have interval and startAt
const startDate = dayjs(this.startDate);
const [ hour, minute ] = this.startTime.split(":");
const [hour, minute] = this.startTime.split(":");
const startDateTime = startDate.hour(hour).minute(minute);
// Fix #6118, since the startDateTime is optional, it will throw error if the date is null when using toISOString()
@@ -279,31 +277,44 @@ class Maintenance extends BeanModel {
startAt = startDateTime.toISOString();
} catch (_) {}
this.beanMeta.job = new Cron(this.cron, {
this.beanMeta.job = new Cron(
this.cron,
{
timezone: await this.getTimezone(),
startAt,
}, () => {
},
() => {
if (!this.lastStartDate || this.interval_day === 1) {
return startEvent();
}
// If last start date is set, it means the maintenance has been started before
let lastStartDate = dayjs(this.lastStartDate)
.subtract(1.1, "hour"); // Subtract 1.1 hour to avoid issues with timezone differences
let lastStartDate = dayjs(this.lastStartDate).subtract(1.1, "hour"); // Subtract 1.1 hour to avoid issues with timezone differences
// Check if the interval is enough
if (current.diff(lastStartDate, "day") < this.interval_day) {
log.debug("maintenance", "Maintenance id: " + this.id + " is still in the window, skipping start event");
log.debug(
"maintenance",
"Maintenance id: " + this.id + " is still in the window, skipping start event"
);
return;
}
log.debug("maintenance", "Maintenance id: " + this.id + " is not in the window, starting event");
log.debug(
"maintenance",
"Maintenance id: " + this.id + " is not in the window, starting event"
);
return startEvent();
});
}
);
} else {
this.beanMeta.job = new Cron(this.cron, {
this.beanMeta.job = new Cron(
this.cron,
{
timezone: await this.getTimezone(),
}, startEvent);
},
startEvent
);
}
// Continue if the maintenance is still in the window
@@ -314,7 +325,6 @@ class Maintenance extends BeanModel {
log.debug("maintenance", "Maintenance id: " + this.id + " Remaining duration: " + duration + "ms");
startEvent(duration);
}
} catch (e) {
log.error("maintenance", "Error in maintenance id: " + this.id);
log.error("maintenance", "Cron: " + this.cron);
@@ -324,7 +334,6 @@ class Maintenance extends BeanModel {
throw e;
}
}
} else {
log.error("maintenance", "Maintenance id: " + this.id + " has no cron");
}
@@ -486,12 +495,11 @@ class Maintenance extends BeanModel {
}
// Remove duplicate
dayList = [ ...new Set(dayList) ];
dayList = [...new Set(dayList)];
this.cron = minute + " " + hour + " " + dayList.join(",") + " * *";
this.duration = this.calcDuration();
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -9,15 +9,22 @@ const { Feed } = require("feed");
const config = require("../config");
const { setting } = require("../util-server");
const { STATUS_PAGE_ALL_DOWN, STATUS_PAGE_ALL_UP, STATUS_PAGE_MAINTENANCE, STATUS_PAGE_PARTIAL_DOWN, UP, MAINTENANCE, DOWN } = require("../../src/util");
const {
STATUS_PAGE_ALL_DOWN,
STATUS_PAGE_ALL_UP,
STATUS_PAGE_MAINTENANCE,
STATUS_PAGE_PARTIAL_DOWN,
UP,
MAINTENANCE,
DOWN,
} = require("../../src/util");
class StatusPage extends BeanModel {
/**
* Like this: { "test-uptime.kuma.pet": "default" }
* @type {{}}
*/
static domainMappingList = { };
static domainMappingList = {};
/**
* Handle responses to RSS pages
@@ -27,9 +34,7 @@ class StatusPage extends BeanModel {
* @returns {Promise<void>}
*/
static async handleStatusPageRSSResponse(response, slug, request) {
let statusPage = await R.findOne("status_page", " slug = ? ", [
slug
]);
let statusPage = await R.findOne("status_page", " slug = ? ", [slug]);
if (statusPage) {
const feedUrl = await StatusPage.buildRSSUrl(slug, request);
@@ -54,9 +59,7 @@ class StatusPage extends BeanModel {
slug = "default";
}
let statusPage = await R.findOne("status_page", " slug = ? ", [
slug
]);
let statusPage = await R.findOne("status_page", " slug = ? ", [slug]);
if (statusPage) {
response.send(await StatusPage.renderHTML(indexHTML, statusPage));
@@ -90,7 +93,7 @@ class StatusPage extends BeanModel {
updated: new Date(), // optional, default = today
});
heartbeats.forEach(heartbeat => {
heartbeats.forEach((heartbeat) => {
feed.addItem({
title: `${heartbeat.name} is down`,
description: `${heartbeat.name} has been down since ${heartbeat.time} UTC`,
@@ -153,9 +156,7 @@ class StatusPage extends BeanModel {
$("meta[name=description]").attr("content", description155);
if (statusPage.icon) {
$("link[rel=icon]")
.attr("href", statusPage.icon)
.removeAttr("type");
$("link[rel=icon]").attr("href", statusPage.icon).removeAttr("type");
$("link[rel=apple-touch-icon]").remove();
}
@@ -168,19 +169,19 @@ class StatusPage extends BeanModel {
}
// OG Meta Tags
let ogTitle = $("<meta property=\"og:title\" content=\"\" />").attr("content", statusPage.title);
let ogTitle = $('<meta property="og:title" content="" />').attr("content", statusPage.title);
head.append(ogTitle);
let ogDescription = $("<meta property=\"og:description\" content=\"\" />").attr("content", description155);
let ogDescription = $('<meta property="og:description" content="" />').attr("content", description155);
head.append(ogDescription);
let ogType = $("<meta property=\"og:type\" content=\"website\" />");
let ogType = $('<meta property="og:type" content="website" />');
head.append(ogType);
// Preload data
// Add jsesc, fix https://github.com/louislam/uptime-kuma/issues/2186
const escapedJSONObject = jsesc(await StatusPage.getStatusPageData(statusPage), {
"isScriptContext": true
isScriptContext: true,
});
const script = $(`
@@ -219,7 +220,7 @@ class StatusPage extends BeanModel {
}
}
if (! hasUp) {
if (!hasUp) {
status = STATUS_PAGE_ALL_DOWN;
}
@@ -267,21 +268,19 @@ class StatusPage extends BeanModel {
// Public Group List
const showTags = !!statusPage.show_tags;
const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [
statusPage.id
]);
const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [statusPage.id]);
let heartbeats = [];
for (let groupBean of list) {
let monitorGroup = await groupBean.toPublicJSON(showTags, config?.showCertificateExpiry);
for (const monitor of monitorGroup.monitorList) {
const heartbeat = await R.findOne("heartbeat", "monitor_id = ? ORDER BY time DESC", [ monitor.id ]);
const heartbeat = await R.findOne("heartbeat", "monitor_id = ? ORDER BY time DESC", [monitor.id]);
if (heartbeat) {
heartbeats.push({
...monitor,
status: heartbeat.status,
time: heartbeat.time
time: heartbeat.time,
});
}
}
@@ -292,11 +291,11 @@ class StatusPage extends BeanModel {
let statusDescription = StatusPage.getStatusDescription(status);
// keep only DOWN heartbeats in the RSS feed
heartbeats = heartbeats.filter(heartbeat => heartbeat.status === DOWN);
heartbeats = heartbeats.filter((heartbeat) => heartbeat.status === DOWN);
return {
heartbeats,
statusDescription
statusDescription,
};
}
@@ -309,9 +308,7 @@ class StatusPage extends BeanModel {
const config = await statusPage.toPublicJSON();
// Incident
let incident = await R.findOne("incident", " pin = 1 AND active = 1 AND status_page_id = ? ", [
statusPage.id,
]);
let incident = await R.findOne("incident", " pin = 1 AND active = 1 AND status_page_id = ? ", [statusPage.id]);
if (incident) {
incident = incident.toPublicJSON();
@@ -323,9 +320,7 @@ class StatusPage extends BeanModel {
const publicGroupList = [];
const showTags = !!statusPage.show_tags;
const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [
statusPage.id
]);
const list = await R.find("group", " public = 1 AND status_page_id = ? ORDER BY weight ", [statusPage.id]);
for (let groupBean of list) {
let monitorGroup = await groupBean.toPublicJSON(showTags, config?.showCertificateExpiry);
@@ -379,16 +374,13 @@ class StatusPage extends BeanModel {
* @returns {Promise<void>}
*/
async updateDomainNameList(domainNameList) {
if (!Array.isArray(domainNameList)) {
throw new Error("Invalid array");
}
let trx = await R.begin();
await trx.exec("DELETE FROM status_page_cname WHERE status_page_id = ?", [
this.id,
]);
await trx.exec("DELETE FROM status_page_cname WHERE status_page_id = ?", [this.id]);
try {
for (let domain of domainNameList) {
@@ -401,9 +393,7 @@ class StatusPage extends BeanModel {
}
// If the domain name is used in another status page, delete it
await trx.exec("DELETE FROM status_page_cname WHERE domain = ?", [
domain,
]);
await trx.exec("DELETE FROM status_page_cname WHERE domain = ?", [domain]);
let mapping = trx.dispense("status_page_cname");
mapping.status_page_id = this.id;
@@ -494,9 +484,7 @@ class StatusPage extends BeanModel {
* @returns {Promise<number>} ID of status page
*/
static async slugToID(slug) {
return await R.getCell("SELECT id FROM status_page WHERE slug = ? ", [
slug
]);
return await R.getCell("SELECT id FROM status_page WHERE slug = ? ", [slug]);
}
/**
@@ -520,21 +508,23 @@ class StatusPage extends BeanModel {
try {
const publicMaintenanceList = [];
let maintenanceIDList = await R.getCol(`
let maintenanceIDList = await R.getCol(
`
SELECT DISTINCT maintenance_id
FROM maintenance_status_page
WHERE status_page_id = ?
`, [ statusPageId ]);
`,
[statusPageId]
);
for (const maintenanceID of maintenanceIDList) {
let maintenance = UptimeKumaServer.getInstance().getMaintenance(maintenanceID);
if (maintenance && await maintenance.isUnderMaintenance()) {
if (maintenance && (await maintenance.isUnderMaintenance())) {
publicMaintenanceList.push(await maintenance.toPublicJSON());
}
}
return publicMaintenanceList;
} catch (error) {
return [];
}

View File

@@ -1,7 +1,6 @@
const { BeanModel } = require("redbean-node/dist/bean-model");
class Tag extends BeanModel {
/**
* Return an object that ready to parse to JSON
* @returns {object} Object ready to parse

View File

@@ -15,7 +15,7 @@ class User extends BeanModel {
static async resetPassword(userID, newPassword) {
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
await passwordHash.generate(newPassword),
userID
userID,
]);
}
@@ -27,10 +27,7 @@ class User extends BeanModel {
async resetPassword(newPassword) {
const hashedPassword = await passwordHash.generate(newPassword);
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [
hashedPassword,
this.id
]);
await R.exec("UPDATE `user` SET password = ? WHERE id = ? ", [hashedPassword, this.id]);
this.password = hashedPassword;
}
@@ -42,12 +39,14 @@ class User extends BeanModel {
* @returns {string} the JsonWebToken as a string
*/
static createJWT(user, jwtSecret) {
return jwt.sign({
return jwt.sign(
{
username: user.username,
h: shake256(user.password, SHAKE256_LENGTH),
}, jwtSecret);
},
jwtSecret
);
}
}
module.exports = User;

View File

@@ -146,7 +146,7 @@ function ApiCache() {
let groupName = req.apicacheGroup;
if (groupName) {
debug("group detected \"" + groupName + "\"");
debug('group detected "' + groupName + '"');
let group = (index.groups[groupName] = index.groups[groupName] || []);
group.unshift(key);
}
@@ -219,9 +219,12 @@ function ApiCache() {
}
// add automatic cache clearing from duration, includes max limit on setTimeout
timers[key] = setTimeout(function () {
timers[key] = setTimeout(
function () {
instance.clear(key, true);
}, Math.min(duration, 2147483647));
},
Math.min(duration, 2147483647)
);
}
/**
@@ -246,10 +249,7 @@ function ApiCache() {
oldContent = !Buffer.alloc ? new Buffer(0) : Buffer.alloc(0);
}
res._apicache.content = Buffer.concat(
[oldContent, content],
oldContent.length + content.length
);
res._apicache.content = Buffer.concat([oldContent, content], oldContent.length + content.length);
} else {
res._apicache.content = content;
}
@@ -310,17 +310,12 @@ function ApiCache() {
if (res._apicache.cacheable && res._apicache.content) {
addIndexEntries(key, req);
let headers = res._apicache.headers || getSafeHeaders(res);
let cacheObject = createCacheObject(
res.statusCode,
headers,
res._apicache.content,
encoding
);
let cacheObject = createCacheObject(res.statusCode, headers, res._apicache.content, encoding);
cacheResponse(key, cacheObject, duration);
// display log entry
let elapsed = new Date() - req.apicacheTimer;
debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
debug('adding cache entry for "' + key + '" @ ' + strDuration, logDuration(elapsed));
debug("_apicache.headers: ", res._apicache.headers);
debug("res.getHeaders(): ", getSafeHeaders(res));
debug("cacheObject: ", cacheObject);
@@ -366,8 +361,7 @@ function ApiCache() {
// unstringify buffers
let data = cacheObject.data;
if (data && data.type === "Buffer") {
data =
typeof data.data === "number" ? new Buffer.alloc(data.data) : new Buffer.from(data.data);
data = typeof data.data === "number" ? new Buffer.alloc(data.data) : new Buffer.from(data.data);
}
// test Etag against If-None-Match for 304
@@ -402,10 +396,10 @@ function ApiCache() {
let redis = globalOptions.redisClient;
if (group) {
debug("clearing group \"" + target + "\"");
debug('clearing group "' + target + '"');
group.forEach(function (key) {
debug("clearing cached entry for \"" + key + "\"");
debug('clearing cached entry for "' + key + '"');
clearTimeout(timers[key]);
delete timers[key];
if (!globalOptions.redisClient) {
@@ -414,7 +408,7 @@ function ApiCache() {
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
console.log('[apicache] error in redis.del("' + key + '")');
}
}
index.all = index.all.filter(doesntMatch(key));
@@ -422,7 +416,7 @@ function ApiCache() {
delete index.groups[target];
} else if (target) {
debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
debug("clearing " + (isAutomatic ? "expired" : "cached") + ' entry for "' + target + '"');
clearTimeout(timers[target]);
delete timers[target];
// clear actual cached entry
@@ -432,7 +426,7 @@ function ApiCache() {
try {
redis.del(target);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + target + "\")");
console.log('[apicache] error in redis.del("' + target + '")');
}
}
@@ -461,7 +455,7 @@ function ApiCache() {
try {
redis.del(key);
} catch (err) {
console.log("[apicache] error in redis.del(\"" + key + "\")");
console.log('[apicache] error in redis.del("' + key + '")');
}
});
}
@@ -830,15 +824,7 @@ function ApiCache() {
);
} else {
perf.miss(key);
return makeResponseCacheable(
req,
res,
next,
key,
duration,
strDuration,
middlewareToggle
);
return makeResponseCacheable(req, res, next, key, duration, strDuration, middlewareToggle);
}
});
} catch (err) {

View File

@@ -1,9 +1,7 @@
const apicache = require("./apicache");
apicache.options({
headerBlacklist: [
"cache-control"
],
headerBlacklist: ["cache-control"],
headers: {
// Disable client side cache, only server side cache.
// BUG! Not working for the second request

View File

@@ -22,7 +22,7 @@ MemoryCache.prototype.add = function (key, value, time, timeoutCallback) {
timeout: setTimeout(function () {
instance.delete(key);
return timeoutCallback && typeof timeoutCallback === "function" && timeoutCallback(value, key);
}, time)
}, time),
};
this.cache[key] = entry;

View File

@@ -1,4 +1,4 @@
'use strict';
"use strict";
// Original file https://raw.githubusercontent.com/elasticio/node-ntlm-client/master/lib/flags.js
module.exports.NTLMFLAG_NEGOTIATE_UNICODE = 1 << 0;
/* Indicates that Unicode strings are supported for use in security buffer

View File

@@ -1,8 +1,9 @@
'use strict';
"use strict";
// Original source at https://github.com/elasticio/node-ntlm-client/blob/master/lib/hash.js
var crypto = require('crypto');
var crypto = require("crypto");
function createLMResponse(challenge, lmhash) {
var buf = new Buffer.alloc(24), pwBuffer = new Buffer.alloc(21).fill(0);
var buf = new Buffer.alloc(24),
pwBuffer = new Buffer.alloc(21).fill(0);
lmhash.copy(pwBuffer);
calculateDES(pwBuffer.slice(0, 7), challenge).copy(buf);
calculateDES(pwBuffer.slice(7, 14), challenge).copy(buf, 8);
@@ -10,40 +11,40 @@ function createLMResponse(challenge, lmhash) {
return buf;
}
function createLMHash(password) {
var buf = new Buffer.alloc(16), pwBuffer = new Buffer.alloc(14), magicKey = new Buffer.from('KGS!@#$%', 'ascii');
var buf = new Buffer.alloc(16),
pwBuffer = new Buffer.alloc(14),
magicKey = new Buffer.from("KGS!@#$%", "ascii");
if (password.length > 14) {
buf.fill(0);
return buf;
}
pwBuffer.fill(0);
pwBuffer.write(password.toUpperCase(), 0, 'ascii');
return Buffer.concat([
calculateDES(pwBuffer.slice(0, 7), magicKey),
calculateDES(pwBuffer.slice(7), magicKey)
]);
pwBuffer.write(password.toUpperCase(), 0, "ascii");
return Buffer.concat([calculateDES(pwBuffer.slice(0, 7), magicKey), calculateDES(pwBuffer.slice(7), magicKey)]);
}
function calculateDES(key, message) {
var desKey = new Buffer.alloc(8);
desKey[0] = key[0] & 0xFE;
desKey[1] = ((key[0] << 7) & 0xFF) | (key[1] >> 1);
desKey[2] = ((key[1] << 6) & 0xFF) | (key[2] >> 2);
desKey[3] = ((key[2] << 5) & 0xFF) | (key[3] >> 3);
desKey[4] = ((key[3] << 4) & 0xFF) | (key[4] >> 4);
desKey[5] = ((key[4] << 3) & 0xFF) | (key[5] >> 5);
desKey[6] = ((key[5] << 2) & 0xFF) | (key[6] >> 6);
desKey[7] = (key[6] << 1) & 0xFF;
desKey[0] = key[0] & 0xfe;
desKey[1] = ((key[0] << 7) & 0xff) | (key[1] >> 1);
desKey[2] = ((key[1] << 6) & 0xff) | (key[2] >> 2);
desKey[3] = ((key[2] << 5) & 0xff) | (key[3] >> 3);
desKey[4] = ((key[3] << 4) & 0xff) | (key[4] >> 4);
desKey[5] = ((key[4] << 3) & 0xff) | (key[5] >> 5);
desKey[6] = ((key[5] << 2) & 0xff) | (key[6] >> 6);
desKey[7] = (key[6] << 1) & 0xff;
for (var i = 0; i < 8; i++) {
var parity = 0;
for (var j = 1; j < 8; j++) {
parity += (desKey[i] >> j) % 2;
}
desKey[i] |= (parity % 2) === 0 ? 1 : 0;
desKey[i] |= parity % 2 === 0 ? 1 : 0;
}
var des = crypto.createCipheriv('DES-ECB', desKey, '');
var des = crypto.createCipheriv("DES-ECB", desKey, "");
return des.update(message);
}
function createNTLMResponse(challenge, ntlmhash) {
var buf = new Buffer.alloc(24), ntlmBuffer = new Buffer.alloc(21).fill(0);
var buf = new Buffer.alloc(24),
ntlmBuffer = new Buffer.alloc(21).fill(0);
ntlmhash.copy(ntlmBuffer);
calculateDES(ntlmBuffer.slice(0, 7), challenge).copy(buf);
calculateDES(ntlmBuffer.slice(7, 14), challenge).copy(buf, 8);
@@ -51,21 +52,23 @@ function createNTLMResponse(challenge, ntlmhash) {
return buf;
}
function createNTLMHash(password) {
var md4sum = crypto.createHash('md4');
md4sum.update(new Buffer.from(password, 'ucs2'));
var md4sum = crypto.createHash("md4");
md4sum.update(new Buffer.from(password, "ucs2"));
return md4sum.digest();
}
function createNTLMv2Hash(ntlmhash, username, authTargetName) {
var hmac = crypto.createHmac('md5', ntlmhash);
hmac.update(new Buffer.from(username.toUpperCase() + authTargetName, 'ucs2'));
var hmac = crypto.createHmac("md5", ntlmhash);
hmac.update(new Buffer.from(username.toUpperCase() + authTargetName, "ucs2"));
return hmac.digest();
}
function createLMv2Response(type2message, username, ntlmhash, nonce, targetName) {
var buf = new Buffer.alloc(24), ntlm2hash = createNTLMv2Hash(ntlmhash, username, targetName), hmac = crypto.createHmac('md5', ntlm2hash);
var buf = new Buffer.alloc(24),
ntlm2hash = createNTLMv2Hash(ntlmhash, username, targetName),
hmac = crypto.createHmac("md5", ntlm2hash);
//server challenge
type2message.challenge.copy(buf, 8);
//client nonce
buf.write(nonce || createPseudoRandomValue(16), 16, 'hex');
buf.write(nonce || createPseudoRandomValue(16), 16, "hex");
//create hash
hmac.update(buf.slice(8));
var hashedBuffer = hmac.digest();
@@ -73,7 +76,9 @@ function createLMv2Response(type2message, username, ntlmhash, nonce, targetName)
return buf;
}
function createNTLMv2Response(type2message, username, ntlmhash, nonce, targetName) {
var buf = new Buffer.alloc(48 + type2message.targetInfo.buffer.length), ntlm2hash = createNTLMv2Hash(ntlmhash, username, targetName), hmac = crypto.createHmac('md5', ntlm2hash);
var buf = new Buffer.alloc(48 + type2message.targetInfo.buffer.length),
ntlm2hash = createNTLMv2Hash(ntlmhash, username, targetName),
hmac = crypto.createHmac("md5", ntlm2hash);
//the first 8 bytes are spare to store the hashed value before the blob
//server challenge
type2message.challenge.copy(buf, 8);
@@ -86,12 +91,12 @@ function createNTLMv2Response(type2message, username, ntlmhash, nonce, targetNam
// maybe think about a different solution here
// 11644473600000 = diff between 1970 and 1601
var timestamp = ((Date.now() + 11644473600000) * 10000).toString(16);
var timestampLow = Number('0x' + timestamp.substring(Math.max(0, timestamp.length - 8)));
var timestampHigh = Number('0x' + timestamp.substring(0, Math.max(0, timestamp.length - 8)));
var timestampLow = Number("0x" + timestamp.substring(Math.max(0, timestamp.length - 8)));
var timestampHigh = Number("0x" + timestamp.substring(0, Math.max(0, timestamp.length - 8)));
buf.writeUInt32LE(timestampLow, 24, false);
buf.writeUInt32LE(timestampHigh, 28, false);
//random client nonce
buf.write(nonce || createPseudoRandomValue(16), 32, 'hex');
buf.write(nonce || createPseudoRandomValue(16), 32, "hex");
//zero
buf.writeUInt32LE(0, 40);
//complete target information block from type 2 message
@@ -104,7 +109,7 @@ function createNTLMv2Response(type2message, username, ntlmhash, nonce, targetNam
return buf;
}
function createPseudoRandomValue(length) {
var str = '';
var str = "";
while (str.length < length) {
str += crypto.randomInt(16).toString(16);
}
@@ -117,6 +122,6 @@ module.exports = {
createNTLMResponse: createNTLMResponse,
createLMv2Response: createLMv2Response,
createNTLMv2Response: createNTLMv2Response,
createPseudoRandomValue: createPseudoRandomValue
createPseudoRandomValue: createPseudoRandomValue,
};
//# sourceMappingURL=hash.js.map

View File

@@ -1,23 +1,30 @@
'use strict';
"use strict";
// Original file https://raw.githubusercontent.com/elasticio/node-ntlm-client/master/lib/ntlm.js
var os = require('os'), flags = require('./flags'), hash = require('./hash');
var os = require("os"),
flags = require("./flags"),
hash = require("./hash");
var NTLMSIGNATURE = "NTLMSSP\0";
function createType1Message(workstation, target) {
var dataPos = 32, pos = 0, buf = new Buffer.alloc(1024);
var dataPos = 32,
pos = 0,
buf = new Buffer.alloc(1024);
workstation = workstation === undefined ? os.hostname() : workstation;
target = target === undefined ? '' : target;
target = target === undefined ? "" : target;
//signature
buf.write(NTLMSIGNATURE, pos, NTLMSIGNATURE.length, 'ascii');
buf.write(NTLMSIGNATURE, pos, NTLMSIGNATURE.length, "ascii");
pos += NTLMSIGNATURE.length;
//message type
buf.writeUInt32LE(1, pos);
pos += 4;
//flags
buf.writeUInt32LE(flags.NTLMFLAG_NEGOTIATE_OEM |
buf.writeUInt32LE(
flags.NTLMFLAG_NEGOTIATE_OEM |
flags.NTLMFLAG_REQUEST_TARGET |
flags.NTLMFLAG_NEGOTIATE_NTLM_KEY |
flags.NTLMFLAG_NEGOTIATE_NTLM2_KEY |
flags.NTLMFLAG_NEGOTIATE_ALWAYS_SIGN, pos);
flags.NTLMFLAG_NEGOTIATE_ALWAYS_SIGN,
pos
);
pos += 4;
//domain security buffer
buf.writeUInt16LE(target.length, pos);
@@ -27,7 +34,7 @@ function createType1Message(workstation, target) {
buf.writeUInt32LE(target.length === 0 ? 0 : dataPos, pos);
pos += 4;
if (target.length > 0) {
dataPos += buf.write(target, dataPos, 'ascii');
dataPos += buf.write(target, dataPos, "ascii");
}
//workstation security buffer
buf.writeUInt16LE(workstation.length, pos);
@@ -37,40 +44,40 @@ function createType1Message(workstation, target) {
buf.writeUInt32LE(workstation.length === 0 ? 0 : dataPos, pos);
pos += 4;
if (workstation.length > 0) {
dataPos += buf.write(workstation, dataPos, 'ascii');
dataPos += buf.write(workstation, dataPos, "ascii");
}
return 'NTLM ' + buf.toString('base64', 0, dataPos);
return "NTLM " + buf.toString("base64", 0, dataPos);
}
function decodeType2Message(str) {
if (str === undefined) {
throw new Error('Invalid argument');
throw new Error("Invalid argument");
}
//convenience
if (Object.prototype.toString.call(str) !== '[object String]') {
if (str.hasOwnProperty('headers') && str.headers.hasOwnProperty('www-authenticate')) {
str = str.headers['www-authenticate'];
}
else {
throw new Error('Invalid argument');
if (Object.prototype.toString.call(str) !== "[object String]") {
if (str.hasOwnProperty("headers") && str.headers.hasOwnProperty("www-authenticate")) {
str = str.headers["www-authenticate"];
} else {
throw new Error("Invalid argument");
}
}
var ntlmMatch = /^NTLM ([^,\s]+)/.exec(str);
if (ntlmMatch) {
str = ntlmMatch[1];
}
var buf = new Buffer.from(str, 'base64'), obj = {};
var buf = new Buffer.from(str, "base64"),
obj = {};
//check signature
if (buf.toString('ascii', 0, NTLMSIGNATURE.length) !== NTLMSIGNATURE) {
throw new Error('Invalid message signature: ' + str);
if (buf.toString("ascii", 0, NTLMSIGNATURE.length) !== NTLMSIGNATURE) {
throw new Error("Invalid message signature: " + str);
}
//check message type
if (buf.readUInt32LE(NTLMSIGNATURE.length) !== 2) {
throw new Error('Invalid message type (no type 2)');
throw new Error("Invalid message type (no type 2)");
}
//read flags
obj.flags = buf.readUInt32LE(20);
obj.encoding = (obj.flags & flags.NTLMFLAG_NEGOTIATE_OEM) ? 'ascii' : 'ucs2';
obj.version = (obj.flags & flags.NTLMFLAG_NEGOTIATE_NTLM2_KEY) ? 2 : 1;
obj.encoding = obj.flags & flags.NTLMFLAG_NEGOTIATE_OEM ? "ascii" : "ucs2";
obj.version = obj.flags & flags.NTLMFLAG_NEGOTIATE_NTLM2_KEY ? 2 : 1;
obj.challenge = buf.slice(24, 32);
//read target name
obj.targetName = (function () {
@@ -78,10 +85,10 @@ function decodeType2Message(str) {
//skipping allocated space
var offset = buf.readUInt32LE(16);
if (length === 0) {
return '';
return "";
}
if ((offset + length) > buf.length || offset < 32) {
throw new Error('Bad type 2 message');
if (offset + length > buf.length || offset < 32) {
throw new Error("Bad type 2 message");
}
return buf.toString(obj.encoding, offset, offset + length);
})();
@@ -97,11 +104,11 @@ function decodeType2Message(str) {
if (length === 0) {
return info;
}
if ((offset + length) > buf.length || offset < 32) {
throw new Error('Bad type 2 message');
if (offset + length > buf.length || offset < 32) {
throw new Error("Bad type 2 message");
}
var pos = offset;
while (pos < (offset + length)) {
while (pos < offset + length) {
var blockType = buf.readUInt16LE(pos);
pos += 2;
var blockLength = buf.readUInt16LE(pos);
@@ -113,39 +120,40 @@ function decodeType2Message(str) {
var blockTypeStr = void 0;
switch (blockType) {
case 1:
blockTypeStr = 'SERVER';
blockTypeStr = "SERVER";
break;
case 2:
blockTypeStr = 'DOMAIN';
blockTypeStr = "DOMAIN";
break;
case 3:
blockTypeStr = 'FQDN';
blockTypeStr = "FQDN";
break;
case 4:
blockTypeStr = 'DNS';
blockTypeStr = "DNS";
break;
case 5:
blockTypeStr = 'PARENT_DNS';
blockTypeStr = "PARENT_DNS";
break;
default:
blockTypeStr = '';
blockTypeStr = "";
break;
}
if (blockTypeStr) {
info[blockTypeStr] = buf.toString('ucs2', pos, pos + blockLength);
info[blockTypeStr] = buf.toString("ucs2", pos, pos + blockLength);
}
pos += blockLength;
}
return {
parsed: info,
buffer: targetInfoBuffer
buffer: targetInfoBuffer,
};
})();
}
return obj;
}
function createType3Message(type2Message, username, password, workstation, target) {
var dataPos = 52, buf = new Buffer.alloc(1024);
var dataPos = 52,
buf = new Buffer.alloc(1024);
if (workstation === undefined) {
workstation = os.hostname();
}
@@ -153,12 +161,15 @@ function createType3Message(type2Message, username, password, workstation, targe
target = type2Message.targetName;
}
//signature
buf.write(NTLMSIGNATURE, 0, NTLMSIGNATURE.length, 'ascii');
buf.write(NTLMSIGNATURE, 0, NTLMSIGNATURE.length, "ascii");
//message type
buf.writeUInt32LE(3, 8);
if (type2Message.version === 2) {
dataPos = 64;
var ntlmHash = hash.createNTLMHash(password), nonce = hash.createPseudoRandomValue(16), lmv2 = hash.createLMv2Response(type2Message, username, ntlmHash, nonce, target), ntlmv2 = hash.createNTLMv2Response(type2Message, username, ntlmHash, nonce, target);
var ntlmHash = hash.createNTLMHash(password),
nonce = hash.createPseudoRandomValue(16),
lmv2 = hash.createLMv2Response(type2Message, username, ntlmHash, nonce, target),
ntlmv2 = hash.createNTLMv2Response(type2Message, username, ntlmHash, nonce, target);
//lmv2 security buffer
buf.writeUInt16LE(lmv2.length, 12);
buf.writeUInt16LE(lmv2.length, 14);
@@ -171,9 +182,11 @@ function createType3Message(type2Message, username, password, workstation, targe
buf.writeUInt32LE(dataPos, 24);
ntlmv2.copy(buf, dataPos);
dataPos += ntlmv2.length;
}
else {
var lmHash = hash.createLMHash(password), ntlmHash = hash.createNTLMHash(password), lm = hash.createLMResponse(type2Message.challenge, lmHash), ntlm = hash.createNTLMResponse(type2Message.challenge, ntlmHash);
} else {
var lmHash = hash.createLMHash(password),
ntlmHash = hash.createNTLMHash(password),
lm = hash.createLMResponse(type2Message.challenge, lmHash),
ntlm = hash.createNTLMResponse(type2Message.challenge, ntlmHash);
//lm security buffer
buf.writeUInt16LE(lm.length, 12);
buf.writeUInt16LE(lm.length, 14);
@@ -188,18 +201,18 @@ function createType3Message(type2Message, username, password, workstation, targe
dataPos += ntlm.length;
}
//target name security buffer
buf.writeUInt16LE(type2Message.encoding === 'ascii' ? target.length : target.length * 2, 28);
buf.writeUInt16LE(type2Message.encoding === 'ascii' ? target.length : target.length * 2, 30);
buf.writeUInt16LE(type2Message.encoding === "ascii" ? target.length : target.length * 2, 28);
buf.writeUInt16LE(type2Message.encoding === "ascii" ? target.length : target.length * 2, 30);
buf.writeUInt32LE(dataPos, 32);
dataPos += buf.write(target, dataPos, type2Message.encoding);
//user name security buffer
buf.writeUInt16LE(type2Message.encoding === 'ascii' ? username.length : username.length * 2, 36);
buf.writeUInt16LE(type2Message.encoding === 'ascii' ? username.length : username.length * 2, 38);
buf.writeUInt16LE(type2Message.encoding === "ascii" ? username.length : username.length * 2, 36);
buf.writeUInt16LE(type2Message.encoding === "ascii" ? username.length : username.length * 2, 38);
buf.writeUInt32LE(dataPos, 40);
dataPos += buf.write(username, dataPos, type2Message.encoding);
//workstation name security buffer
buf.writeUInt16LE(type2Message.encoding === 'ascii' ? workstation.length : workstation.length * 2, 44);
buf.writeUInt16LE(type2Message.encoding === 'ascii' ? workstation.length : workstation.length * 2, 46);
buf.writeUInt16LE(type2Message.encoding === "ascii" ? workstation.length : workstation.length * 2, 44);
buf.writeUInt16LE(type2Message.encoding === "ascii" ? workstation.length : workstation.length * 2, 46);
buf.writeUInt32LE(dataPos, 48);
dataPos += buf.write(workstation, dataPos, type2Message.encoding);
if (type2Message.version === 2) {
@@ -210,11 +223,11 @@ function createType3Message(type2Message, username, password, workstation, targe
//flags
buf.writeUInt32LE(type2Message.flags, 60);
}
return 'NTLM ' + buf.toString('base64', 0, dataPos);
return "NTLM " + buf.toString("base64", 0, dataPos);
}
module.exports = {
createType1Message: createType1Message,
decodeType2Message: decodeType2Message,
createType3Message: createType3Message
createType3Message: createType3Message,
};
//# sourceMappingURL=ntlm.js.map

View File

@@ -1,62 +1,176 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
var __createBinding =
(this && this.__createBinding) ||
(Object.create
? function (o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
Object.defineProperty(o, k2, {
enumerable: true,
get: function () {
return m[k];
},
});
}
: function (o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
});
var __setModuleDefault =
(this && this.__setModuleDefault) ||
(Object.create
? function (o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
}
: function (o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
});
var __importStar =
(this && this.__importStar) ||
function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
if (mod != null)
for (var k in mod)
if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
};
var __awaiter =
(this && this.__awaiter) ||
function (thisArg, _arguments, P, generator) {
function adopt(value) {
return value instanceof P
? value
: new P(function (resolve) {
resolve(value);
});
}
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
function fulfilled(value) {
try {
step(generator.next(value));
} catch (e) {
reject(e);
}
}
function rejected(value) {
try {
step(generator["throw"](value));
} catch (e) {
reject(e);
}
}
function step(result) {
result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
}
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
};
var __generator =
(this && this.__generator) ||
function (thisArg, body) {
var _ = {
label: 0,
sent: function () {
if (t[0] & 1) throw t[1];
return t[1];
},
trys: [],
ops: [],
},
f,
y,
t,
g;
return (
(g = { next: verb(0), throw: verb(1), return: verb(2) }),
typeof Symbol === "function" &&
(g[Symbol.iterator] = function () {
return this;
}),
g
);
function verb(n) {
return function (v) {
return step([n, v]);
};
}
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
while (_)
try {
if (
((f = 1),
y &&
(t =
op[0] & 2
? y["return"]
: op[0]
? y["throw"] || ((t = y["return"]) && t.call(y), 0)
: y.next) &&
!(t = t.call(y, op[1])).done)
)
return t;
if (((y = 0), t)) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
case 0:
case 1:
t = op;
break;
case 4:
_.label++;
return { value: op[1], done: false };
case 5:
_.label++;
y = op[1];
op = [0];
continue;
case 7:
op = _.ops.pop();
_.trys.pop();
continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (
!((t = _.trys), (t = t.length > 0 && t[t.length - 1])) &&
(op[0] === 6 || op[0] === 2)
) {
_ = 0;
continue;
}
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) {
_.label = op[1];
break;
}
if (op[0] === 6 && _.label < t[1]) {
_.label = t[1];
t = op;
break;
}
if (t && _.label < t[2]) {
_.label = t[2];
_.ops.push(op);
break;
}
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
_.trys.pop();
continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
} catch (e) {
op = [6, e];
y = 0;
} finally {
f = t = 0;
}
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
if (op[0] & 5) throw op[1];
return { value: op[0] ? op[1] : void 0, done: true };
}
};
var __importDefault =
(this && this.__importDefault) ||
function (mod) {
return mod && mod.__esModule ? mod : { default: mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.NtlmClient = void 0;
var axios_1 = __importDefault(require("axios"));
@@ -65,11 +179,11 @@ var https = __importStar(require("https"));
var http = __importStar(require("http"));
var dev_null_1 = __importDefault(require("dev-null"));
/**
* @param credentials An NtlmCredentials object containing the username and password
* @param AxiosConfig The Axios config for the instance you wish to create
*
* @returns This function returns an axios instance configured to use the provided credentials
*/
* @param credentials An NtlmCredentials object containing the username and password
* @param AxiosConfig The Axios config for the instance you wish to create
*
* @returns This function returns an axios instance configured to use the provided credentials
*/
function NtlmClient(credentials, AxiosConfig) {
var _this = this;
var config = AxiosConfig !== null && AxiosConfig !== void 0 ? AxiosConfig : {};
@@ -80,9 +194,12 @@ function NtlmClient(credentials, AxiosConfig) {
config.httpsAgent = new https.Agent({ keepAlive: true });
}
var client = axios_1.default.create(config);
client.interceptors.response.use(function (response) {
client.interceptors.response.use(
function (response) {
return response;
}, function (err) { return __awaiter(_this, void 0, void 0, function () {
},
function (err) {
return __awaiter(_this, void 0, void 0, function () {
var error, t1Msg, t2Msg, t3Msg, stream_1;
var _a;
return __generator(this, function (_b) {
@@ -91,10 +208,20 @@ function NtlmClient(credentials, AxiosConfig) {
error = err.response;
// The header may look like this: `Negotiate, NTLM, Basic realm="itsahiddenrealm.example.net"`Add commentMore actions
// so extract the 'NTLM' part first
const ntlmheader = error.headers['www-authenticate'].split(',').find(_ => _.match(/ *NTLM/))?.trim() || '';
if (!(error && error.status === 401
&& error.headers['www-authenticate']
&& error.headers['www-authenticate'].includes('NTLM'))) return [3 /*break*/, 3];
const ntlmheader =
error.headers["www-authenticate"]
.split(",")
.find((_) => _.match(/ *NTLM/))
?.trim() || "";
if (
!(
error &&
error.status === 401 &&
error.headers["www-authenticate"] &&
error.headers["www-authenticate"].includes("NTLM")
)
)
return [3 /*break*/, 3];
// This length check is a hack because SharePoint is awkward and will
// include the Negotiate option when responding with the T2 message
// There is nore we could do to ensure we are processing correctly,
@@ -102,28 +229,40 @@ function NtlmClient(credentials, AxiosConfig) {
if (ntlmheader.length < 50) {
t1Msg = ntlm.createType1Message(credentials.workstation, credentials.domain);
error.config.headers["Authorization"] = t1Msg;
}
else {
} else {
t2Msg = ntlm.decodeType2Message((ntlmheader.match(/^NTLM\s+(.+?)(,|\s+|$)/) || [])[1]);
t3Msg = ntlm.createType3Message(t2Msg, credentials.username, credentials.password, credentials.workstation, credentials.domain);
t3Msg = ntlm.createType3Message(
t2Msg,
credentials.username,
credentials.password,
credentials.workstation,
credentials.domain
);
error.config.headers["X-retry"] = "false";
error.config.headers["Authorization"] = t3Msg;
}
if (!(error.config.responseType === "stream")) return [3 /*break*/, 2];
stream_1 = (_a = err.response) === null || _a === void 0 ? void 0 : _a.data;
if (!(stream_1 && !stream_1.readableEnded)) return [3 /*break*/, 2];
return [4 /*yield*/, new Promise(function (resolve) {
return [
4 /*yield*/,
new Promise(function (resolve) {
stream_1.pipe((0, dev_null_1.default)());
stream_1.once('close', resolve);
})];
stream_1.once("close", resolve);
}),
];
case 1:
_b.sent();
_b.label = 2;
case 2: return [2 /*return*/, client(error.config)];
case 3: throw err;
case 2:
return [2 /*return*/, client(error.config)];
case 3:
throw err;
}
});
}); });
});
}
);
return client;
}
exports.NtlmClient = NtlmClient;

View File

@@ -1,20 +1,20 @@
import { PluginFunc, ConfigType } from 'dayjs'
import { PluginFunc, ConfigType } from "dayjs";
declare const plugin: PluginFunc
export = plugin
declare const plugin: PluginFunc;
export = plugin;
declare module 'dayjs' {
declare module "dayjs" {
interface Dayjs {
tz(timezone?: string, keepLocalTime?: boolean): Dayjs
offsetName(type?: 'short' | 'long'): string | undefined
tz(timezone?: string, keepLocalTime?: boolean): Dayjs;
offsetName(type?: "short" | "long"): string | undefined;
}
interface DayjsTimezone {
(date: ConfigType, timezone?: string): Dayjs
(date: ConfigType, format: string, timezone?: string): Dayjs
guess(): string
setDefault(timezone?: string): void
(date: ConfigType, timezone?: string): Dayjs;
(date: ConfigType, format: string, timezone?: string): Dayjs;
guess(): string;
setDefault(timezone?: string): void;
}
const tz: DayjsTimezone
const tz: DayjsTimezone;
}

View File

@@ -4,10 +4,14 @@
* Source: https://github.com/iamkun/dayjs/tree/dev/src/plugin/utc
* License: MIT
*/
!function (t, e) {
!(function (t, e) {
// eslint-disable-next-line no-undef
typeof exports == "object" && typeof module != "undefined" ? module.exports = e() : typeof define == "function" && define.amd ? define(e) : (t = typeof globalThis != "undefined" ? globalThis : t || self).dayjs_plugin_timezone = e();
}(this, (function () {
typeof exports == "object" && typeof module != "undefined"
? (module.exports = e())
: typeof define == "function" && define.amd
? define(e)
: ((t = typeof globalThis != "undefined" ? globalThis : t || self).dayjs_plugin_timezone = e());
})(this, function () {
"use strict";
let t = {
year: 0,
@@ -15,7 +19,7 @@
day: 2,
hour: 3,
minute: 4,
second: 5
second: 5,
};
let e = {};
return function (n, i, o) {
@@ -23,12 +27,14 @@
let a = function (t, n, i) {
void 0 === i && (i = {});
let o = new Date(t);
let r = function (t, n) {
let r = (function (t, n) {
void 0 === n && (n = {});
let i = n.timeZoneName || "short";
let o = t + "|" + i;
let r = e[o];
return r || (r = new Intl.DateTimeFormat("en-US", {
return (
r ||
((r = new Intl.DateTimeFormat("en-US", {
hour12: !1,
timeZone: t,
year: "numeric",
@@ -37,9 +43,12 @@
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
timeZoneName: i
}), e[o] = r), r;
}(n, i);
timeZoneName: i,
})),
(e[o] = r)),
r
);
})(n, i);
return r.formatToParts(o);
};
let u = function (e, n) {
@@ -60,56 +69,62 @@
return (o.utc(v).valueOf() - (h -= h % 1e3)) / 6e4;
};
let f = i.prototype;
f.tz = function (t, e) {
((f.tz = function (t, e) {
void 0 === t && (t = r);
let n = this.utcOffset();
let i = this.toDate();
let a = i.toLocaleString("en-US", { timeZone: t }).replace("\u202f", " ");
let u = Math.round((i - new Date(a)) / 1e3 / 60);
let f = o(a).$set("millisecond", this.$ms).utcOffset(15 * -Math.round(i.getTimezoneOffset() / 15) - u, !0);
let f = o(a)
.$set("millisecond", this.$ms)
.utcOffset(15 * -Math.round(i.getTimezoneOffset() / 15) - u, !0);
if (e) {
let s = f.utcOffset();
f = f.add(n - s, "minute");
}
return f.$x.$timezone = t, f;
}, f.offsetName = function (t) {
return ((f.$x.$timezone = t), f);
}),
(f.offsetName = function (t) {
let e = this.$x.$timezone || o.tz.guess();
let n = a(this.valueOf(), e, { timeZoneName: t }).find((function (t) {
let n = a(this.valueOf(), e, { timeZoneName: t }).find(function (t) {
return t.type.toLowerCase() === "timezonename";
}));
});
return n && n.value;
};
}));
let s = f.startOf;
f.startOf = function (t, e) {
((f.startOf = function (t, e) {
if (!this.$x || !this.$x.$timezone) {
return s.call(this, t, e);
}
let n = o(this.format("YYYY-MM-DD HH:mm:ss:SSS"));
return s.call(n, t, e).tz(this.$x.$timezone, !0);
}, o.tz = function (t, e, n) {
}),
(o.tz = function (t, e, n) {
let i = n && e;
let a = n || e || r;
let f = u(+o(), a);
if (typeof t != "string") {
return o(t).tz(a);
}
let s = function (t, e, n) {
let s = (function (t, e, n) {
let i = t - 60 * e * 1e3;
let o = u(i, n);
if (e === o) {
return [ i, e ];
return [i, e];
}
let r = u(i -= 60 * (o - e) * 1e3, n);
return o === r ? [ i, o ] : [ t - 60 * Math.min(o, r) * 1e3, Math.max(o, r) ];
}(o.utc(t, i).valueOf(), f, a);
let r = u((i -= 60 * (o - e) * 1e3), n);
return o === r ? [i, o] : [t - 60 * Math.min(o, r) * 1e3, Math.max(o, r)];
})(o.utc(t, i).valueOf(), f, a);
let m = s[0];
let c = s[1];
let d = o(m).utcOffset(c);
return d.$x.$timezone = a, d;
}, o.tz.guess = function () {
return ((d.$x.$timezone = a), d);
}),
(o.tz.guess = function () {
return Intl.DateTimeFormat().resolvedOptions().timeZone;
}, o.tz.setDefault = function (t) {
}),
(o.tz.setDefault = function (t) {
r = t;
}));
};
};
}));
});

View File

@@ -13,7 +13,13 @@ function evaluateExpression(expression, context) {
*/
const operator = operatorMap.get(expression.operator) || null;
if (operator === null) {
throw new Error("Unexpected expression operator ID '" + expression.operator + "'. Expected one of [" + operatorMap.keys().join(",") + "]");
throw new Error(
"Unexpected expression operator ID '" +
expression.operator +
"'. Expected one of [" +
operatorMap.keys().join(",") +
"]"
);
}
if (!Object.prototype.hasOwnProperty.call(context, expression.variable)) {
@@ -44,7 +50,9 @@ function evaluateExpressionGroup(group, context) {
} else if (child instanceof ConditionExpressionGroup) {
childResult = evaluateExpressionGroup(child, context);
} else {
throw new Error("Invalid child type in ConditionExpressionGroup. Expected ConditionExpression or ConditionExpressionGroup");
throw new Error(
"Invalid child type in ConditionExpressionGroup. Expected ConditionExpression or ConditionExpressionGroup"
);
}
if (result === null) {
@@ -54,7 +62,11 @@ function evaluateExpressionGroup(group, context) {
} else if (child.andOr === LOGICAL.AND) {
result = result && childResult;
} else {
throw new Error("Invalid logical operator in child of ConditionExpressionGroup. Expected 'and' or 'or'. Got '" + group.andOr + "'");
throw new Error(
"Invalid logical operator in child of ConditionExpressionGroup. Expected 'and' or 'or'. Got '" +
group.andOr +
"'"
);
}
}

View File

@@ -15,7 +15,7 @@ const LOGICAL = {
* @returns {void}
*/
function processMonitorConditions(conditions, parentGroup) {
conditions.forEach(condition => {
conditions.forEach((condition) => {
const andOr = condition.andOr === LOGICAL.OR ? LOGICAL.OR : LOGICAL.AND;
if (condition.type === "group") {

View File

@@ -260,20 +260,20 @@ class GreaterThanOrEqualToOperator extends ConditionOperator {
}
const operatorMap = new Map([
[ OP_STR_EQUALS, new StringEqualsOperator ],
[ OP_STR_NOT_EQUALS, new StringNotEqualsOperator ],
[ OP_CONTAINS, new ContainsOperator ],
[ OP_NOT_CONTAINS, new NotContainsOperator ],
[ OP_STARTS_WITH, new StartsWithOperator ],
[ OP_NOT_STARTS_WITH, new NotStartsWithOperator ],
[ OP_ENDS_WITH, new EndsWithOperator ],
[ OP_NOT_ENDS_WITH, new NotEndsWithOperator ],
[ OP_NUM_EQUALS, new NumberEqualsOperator ],
[ OP_NUM_NOT_EQUALS, new NumberNotEqualsOperator ],
[ OP_LT, new LessThanOperator ],
[ OP_GT, new GreaterThanOperator ],
[ OP_LTE, new LessThanOrEqualToOperator ],
[ OP_GTE, new GreaterThanOrEqualToOperator ],
[OP_STR_EQUALS, new StringEqualsOperator()],
[OP_STR_NOT_EQUALS, new StringNotEqualsOperator()],
[OP_CONTAINS, new ContainsOperator()],
[OP_NOT_CONTAINS, new NotContainsOperator()],
[OP_STARTS_WITH, new StartsWithOperator()],
[OP_NOT_STARTS_WITH, new NotStartsWithOperator()],
[OP_ENDS_WITH, new EndsWithOperator()],
[OP_NOT_ENDS_WITH, new NotEndsWithOperator()],
[OP_NUM_EQUALS, new NumberEqualsOperator()],
[OP_NUM_NOT_EQUALS, new NumberNotEqualsOperator()],
[OP_LT, new LessThanOperator()],
[OP_GT, new GreaterThanOperator()],
[OP_LTE, new LessThanOrEqualToOperator()],
[OP_GTE, new GreaterThanOrEqualToOperator()],
]);
const defaultStringOperators = [
@@ -284,7 +284,7 @@ const defaultStringOperators = [
operatorMap.get(OP_STARTS_WITH),
operatorMap.get(OP_NOT_STARTS_WITH),
operatorMap.get(OP_ENDS_WITH),
operatorMap.get(OP_NOT_ENDS_WITH)
operatorMap.get(OP_NOT_ENDS_WITH),
];
const defaultNumberOperators = [
@@ -293,7 +293,7 @@ const defaultNumberOperators = [
operatorMap.get(OP_LT),
operatorMap.get(OP_GT),
operatorMap.get(OP_LTE),
operatorMap.get(OP_GTE)
operatorMap.get(OP_GTE),
];
module.exports = {

View File

@@ -14,9 +14,7 @@ class DnsMonitorType extends MonitorType {
supportsConditions = true;
conditionVariables = [
new ConditionVariable("record", defaultStringOperators ),
];
conditionVariables = [new ConditionVariable("record", defaultStringOperators)];
/**
* @inheritdoc
@@ -31,19 +29,19 @@ class DnsMonitorType extends MonitorType {
const conditions = ConditionExpressionGroup.fromMonitor(monitor);
let conditionsResult = true;
const handleConditions = (data) => conditions ? evaluateExpressionGroup(conditions, data) : true;
const handleConditions = (data) => (conditions ? evaluateExpressionGroup(conditions, data) : true);
switch (monitor.dns_resolve_type) {
case "A":
case "AAAA":
case "PTR":
dnsMessage = `Records: ${dnsRes.join(" | ")}`;
conditionsResult = dnsRes.some(record => handleConditions({ record }));
conditionsResult = dnsRes.some((record) => handleConditions({ record }));
break;
case "TXT":
dnsMessage = `Records: ${dnsRes.join(" | ")}`;
conditionsResult = dnsRes.flat().some(record => handleConditions({ record }));
conditionsResult = dnsRes.flat().some((record) => handleConditions({ record }));
break;
case "CNAME":
@@ -54,18 +52,23 @@ class DnsMonitorType extends MonitorType {
case "CAA":
// .filter(Boolean) was added because some CAA records do not contain an issue key, resulting in a blank list item.
// Hypothetical dnsRes [{ critical: 0, issuewild: 'letsencrypt.org' }, { critical: 0, issue: 'letsencrypt.org' }]
dnsMessage = `Records: ${dnsRes.map(record => record.issue).filter(Boolean).join(" | ")}`;
conditionsResult = dnsRes.some(record => handleConditions({ record: record.issue }));
dnsMessage = `Records: ${dnsRes
.map((record) => record.issue)
.filter(Boolean)
.join(" | ")}`;
conditionsResult = dnsRes.some((record) => handleConditions({ record: record.issue }));
break;
case "MX":
dnsMessage = dnsRes.map(record => `Hostname: ${record.exchange} - Priority: ${record.priority}`).join(" | ");
conditionsResult = dnsRes.some(record => handleConditions({ record: record.exchange }));
dnsMessage = dnsRes
.map((record) => `Hostname: ${record.exchange} - Priority: ${record.priority}`)
.join(" | ");
conditionsResult = dnsRes.some((record) => handleConditions({ record: record.exchange }));
break;
case "NS":
dnsMessage = `Servers: ${dnsRes.join(" | ")}`;
conditionsResult = dnsRes.some(record => handleConditions({ record }));
conditionsResult = dnsRes.some((record) => handleConditions({ record }));
break;
case "SOA":
@@ -74,13 +77,18 @@ class DnsMonitorType extends MonitorType {
break;
case "SRV":
dnsMessage = dnsRes.map(record => `Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight}`).join(" | ");
conditionsResult = dnsRes.some(record => handleConditions({ record: record.name }));
dnsMessage = dnsRes
.map(
(record) =>
`Name: ${record.name} | Port: ${record.port} | Priority: ${record.priority} | Weight: ${record.weight}`
)
.join(" | ");
conditionsResult = dnsRes.some((record) => handleConditions({ record: record.name }));
break;
}
if (monitor.dns_last_result !== dnsMessage && dnsMessage !== undefined) {
await R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [ dnsMessage, monitor.id ]);
await R.exec("UPDATE `monitor` SET dns_last_result = ? WHERE id = ? ", [dnsMessage, monitor.id]);
}
if (!conditionsResult) {
@@ -108,23 +116,26 @@ class DnsMonitorType extends MonitorType {
*/
async resolveDnsResolverServers(dnsResolveServer) {
// Remove all spaces, split into array, remove all elements that are empty
const addresses = dnsResolveServer.replace(/\s/g, "").split(",").filter((x) => x !== "");
const addresses = dnsResolveServer
.replace(/\s/g, "")
.split(",")
.filter((x) => x !== "");
if (!addresses.length) {
throw new Error("No Resolver Servers specified. Please specifiy at least one resolver server like 1.1.1.1 or a hostname");
throw new Error(
"No Resolver Servers specified. Please specifiy at least one resolver server like 1.1.1.1 or a hostname"
);
}
const resolver = new Resolver();
// Make promises to be resolved concurrently
const promises = addresses.map(async (e) => {
if (net.isIP(e)) { // If IPv4 or IPv6 addr, immediately return
return [ e ];
if (net.isIP(e)) {
// If IPv4 or IPv6 addr, immediately return
return [e];
}
// Otherwise, attempt to resolve hostname
const [ v4, v6 ] = await Promise.allSettled([
resolver.resolve4(e),
resolver.resolve6(e),
]);
const [v4, v6] = await Promise.allSettled([resolver.resolve4(e), resolver.resolve6(e)]);
const addrs = [
...(v4.status === "fulfilled" ? v4.value : []),
@@ -145,7 +156,9 @@ class DnsMonitorType extends MonitorType {
// only the resolver resolution can discard an address
// -> no special error message for only the net.isIP case is necessary
if (!parsed.length) {
throw new Error("None of the configured resolver servers could be resolved to an IP address. Please provide a comma-separated list of valid resolver hostnames or IP addresses.");
throw new Error(
"None of the configured resolver servers could be resolved to an IP address. Please provide a comma-separated list of valid resolver hostnames or IP addresses."
);
}
return parsed;
}
@@ -160,7 +173,7 @@ class DnsMonitorType extends MonitorType {
*/
async dnsResolve(hostname, resolverServer, resolverPort, rrtype) {
const resolver = new Resolver();
resolver.setServers(resolverServer.map(server => `[${server}]:${resolverPort}`));
resolver.setServers(resolverServer.map((server) => `[${server}]:${resolverPort}`));
if (rrtype === "PTR") {
return await resolver.reverse(hostname);
}

View File

@@ -79,4 +79,3 @@ class GroupMonitorType extends MonitorType {
module.exports = {
GroupMonitorType,
};

View File

@@ -12,17 +12,27 @@ class GrpcKeywordMonitorType extends MonitorType {
*/
async check(monitor, heartbeat, _server) {
const startTime = dayjs().valueOf();
const service = this.constructGrpcService(monitor.grpcUrl, monitor.grpcProtobuf, monitor.grpcServiceName, monitor.grpcEnableTls);
const service = this.constructGrpcService(
monitor.grpcUrl,
monitor.grpcProtobuf,
monitor.grpcServiceName,
monitor.grpcEnableTls
);
let response = await this.grpcQuery(service, monitor.grpcMethod, monitor.grpcBody);
heartbeat.ping = dayjs().valueOf() - startTime;
log.debug(this.name, "gRPC response:", response);
let keywordFound = response.toString().includes(monitor.keyword);
if (keywordFound !== !monitor.isInvertKeyword()) {
log.debug(this.name, `GRPC response [${response}] + ", but keyword [${monitor.keyword}] is ${keywordFound ? "present" : "not"} in [" + ${response} + "]"`);
log.debug(
this.name,
`GRPC response [${response}] + ", but keyword [${monitor.keyword}] is ${keywordFound ? "present" : "not"} in [" + ${response} + "]"`
);
let truncatedResponse = (response.length > 50) ? response.toString().substring(0, 47) + "..." : response;
let truncatedResponse = response.length > 50 ? response.toString().substring(0, 47) + "..." : response;
throw new Error(`keyword [${monitor.keyword}] is ${keywordFound ? "present" : "not"} in [" + ${truncatedResponse} + "]`);
throw new Error(
`keyword [${monitor.keyword}] is ${keywordFound ? "present" : "not"} in [" + ${truncatedResponse} + "]`
);
}
heartbeat.status = UP;
heartbeat.msg = `${response}, keyword [${monitor.keyword}] ${keywordFound ? "is" : "not"} found`;
@@ -42,7 +52,8 @@ class GrpcKeywordMonitorType extends MonitorType {
const Client = grpc.makeGenericClientConstructor({});
const credentials = enableTls ? grpc.credentials.createSsl() : grpc.credentials.createInsecure();
const client = new Client(url, credentials);
return protoServiceObject.create((method, requestData, cb) => {
return protoServiceObject.create(
(method, requestData, cb) => {
const fullServiceName = method.fullName;
const serviceFQDN = fullServiceName.split(".");
const serviceMethod = serviceFQDN.pop();
@@ -50,11 +61,15 @@ class GrpcKeywordMonitorType extends MonitorType {
log.debug(this.name, `gRPC method ${serviceMethodClientImpl}`);
client.makeUnaryRequest(
serviceMethodClientImpl,
arg => arg,
arg => arg,
(arg) => arg,
(arg) => arg,
requestData,
cb);
}, false, false);
cb
);
},
false,
false
);
}
/**

View File

@@ -34,5 +34,5 @@ class ManualMonitorType extends MonitorType {
}
module.exports = {
ManualMonitorType
ManualMonitorType,
};

Some files were not shown because too many files have changed in this diff Show More