Compare commits

..

2 Commits

Author SHA1 Message Date
Frank Elsinga
ff197839b3 Moved the DebugMonitorDialog into its own component 2024-10-09 04:53:35 +02:00
Frank Elsinga
c90aacc62c made sure that the debug output for editing the monitor is shell-escaped 2024-10-09 03:45:07 +02:00
606 changed files with 16595 additions and 54454 deletions

View File

@ -32,6 +32,7 @@ tsconfig.json
/extra/healthcheck.exe /extra/healthcheck.exe
/extra/healthcheck /extra/healthcheck
/extra/exe-builder /extra/exe-builder
/extra/push-examples
/extra/uptime-kuma-push /extra/uptime-kuma-push
# Comment the following line if you want to rebuild the healthcheck binary # Comment the following line if you want to rebuild the healthcheck binary

View File

@ -1,5 +1,9 @@
module.exports = { module.exports = {
ignorePatterns: ["test/*.js", "server/modules/*", "src/util.js"], ignorePatterns: [
"test/*.js",
"server/modules/*",
"src/util.js"
],
root: true, root: true,
env: { env: {
browser: true, browser: true,
@ -11,7 +15,6 @@ module.exports = {
"eslint:recommended", "eslint:recommended",
"plugin:vue/vue3-recommended", "plugin:vue/vue3-recommended",
"plugin:jsdoc/recommended-error", "plugin:jsdoc/recommended-error",
"prettier", // Disables ESLint formatting rules that conflict with Prettier
], ],
parser: "vue-eslint-parser", parser: "vue-eslint-parser",
parserOptions: { parserOptions: {
@ -19,93 +22,147 @@ module.exports = {
sourceType: "module", sourceType: "module",
requireConfigFile: false, requireConfigFile: false,
}, },
plugins: ["jsdoc", "@typescript-eslint"], plugins: [
"jsdoc",
"@typescript-eslint",
],
rules: { rules: {
yoda: "error", "yoda": "error",
eqeqeq: ["warn", "smart"], eqeqeq: [ "warn", "smart" ],
camelcase: [ "linebreak-style": [ "error", "unix" ],
"warn", "camelcase": [ "warn", {
"properties": "never",
"ignoreImports": true
}],
"no-unused-vars": [ "warn", {
"args": "none"
}],
indent: [
"error",
4,
{ {
properties: "never", ignoredNodes: [ "TemplateLiteral" ],
ignoreImports: true, SwitchCase: 1,
},
],
"no-unused-vars": [
"warn",
{
args: "none",
}, },
], ],
quotes: [ "error", "double" ],
semi: "error",
"vue/html-indent": [ "error", 4 ], // default: 2
"vue/max-attributes-per-line": "off", "vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off", "vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off", "vue/html-self-closing": "off",
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675 "vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly "vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"vue/multi-word-component-names": "off", "vue/multi-word-component-names": "off",
curly: "error", "no-multi-spaces": [ "error", {
ignoreEOLComments: true,
}],
"array-bracket-spacing": [ "warn", "always", {
"singleValue": true,
"objectsInArrays": false,
"arraysInArrays": false
}],
"space-before-function-paren": [ "error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": [ "error", "always" ],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
"brace-style": "error",
"no-var": "error", "no-var": "error",
"no-throw-literal": "error", "key-spacing": "warn",
"no-constant-condition": [ "keyword-spacing": "warn",
"error", "space-infix-ops": "error",
{ "arrow-spacing": "warn",
checkLoops: false, "no-trailing-spaces": "error",
}, "no-constant-condition": [ "error", {
], "checkLoops": false,
}],
"space-before-blocks": "warn",
//"no-console": "warn", //"no-console": "warn",
"no-extra-boolean-cast": "off", "no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": [ "warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error", "no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//"prefer-template": "error", //"prefer-template": "error",
"no-empty": [ "template-curly-spacing": [ "warn", "never" ],
"error", "comma-dangle": [ "warn", "only-multiline" ],
{ "no-empty": [ "error", {
allowEmptyCatch: true, "allowEmptyCatch": true
}, }],
],
"no-control-regex": "off", "no-control-regex": "off",
"one-var": ["error", "never"], "one-var": [ "error", "never" ],
"max-statements-per-line": ["error", { max: 1 }], "max-statements-per-line": [ "error", { "max": 1 }],
"jsdoc/check-tag-names": [ "jsdoc/check-tag-names": [
"error", "error",
{ {
definedTags: ["link"], "definedTags": [ "link" ]
}, }
], ],
"jsdoc/no-undefined-types": "off", "jsdoc/no-undefined-types": "off",
"jsdoc/no-defaults": ["error", { noOptionalParamNames: true }], "jsdoc/no-defaults": [
"error",
{ "noOptionalParamNames": true }
],
"jsdoc/require-throws": "warn", "jsdoc/require-throws": "warn",
"jsdoc/require-jsdoc": [ "jsdoc/require-jsdoc": [
"error", "error",
{ {
require: { "require": {
FunctionDeclaration: true, "FunctionDeclaration": true,
MethodDefinition: true, "MethodDefinition": true,
}, }
}, }
], ],
"jsdoc/no-blank-block-descriptions": "error", "jsdoc/no-blank-block-descriptions": "error",
"jsdoc/require-returns-description": "warn", "jsdoc/require-returns-description": "warn",
"jsdoc/require-returns-check": ["error", { reportMissingReturnForUndefinedTypes: false }], "jsdoc/require-returns-check": [
"error",
{ "reportMissingReturnForUndefinedTypes": false }
],
"jsdoc/require-returns": [ "jsdoc/require-returns": [
"warn", "warn",
{ {
forceRequireReturn: true, "forceRequireReturn": true,
forceReturnsWithAsync: true, "forceReturnsWithAsync": true
}, }
], ],
"jsdoc/require-param-type": "warn", "jsdoc/require-param-type": "warn",
"jsdoc/require-param-description": "warn", "jsdoc/require-param-description": "warn"
}, },
overrides: [ "overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": [ "error", "always-multiline" ],
}
},
// Override for TypeScript // Override for TypeScript
{ {
files: ["**/*.ts"], "files": [
extends: ["plugin:@typescript-eslint/recommended"], "**/*.ts",
rules: { ],
extends: [
"plugin:@typescript-eslint/recommended",
],
"rules": {
"jsdoc/require-returns-type": "off", "jsdoc/require-returns-type": "off",
"jsdoc/require-param-type": "off", "jsdoc/require-param-type": "off",
"@typescript-eslint/no-explicit-any": "off", "@typescript-eslint/no-explicit-any": "off",
"prefer-const": "off", "prefer-const": "off",
}, }
}, }
], ]
}; };

View File

@ -0,0 +1,75 @@
name: "❓ Ask for help"
description: "Submit any question related to Uptime Kuma"
#title: "[Help] "
labels: [help]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "⚠️ Please verify that this question has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=)"
options:
- label: "I checked and didn't find a similar issue"
required: true
- type: checkboxes
attributes:
label: "🛡️ Security Policy"
description: Please review the security policy before reporting security related issues/bugs.
options:
- label: I agree to have read this project [Security Policy](https://github.com/louislam/uptime-kuma/security/policy)
required: true
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "📝 Describe your problem"
description: "Please walk us through it step by step. Include all important details and add screenshots where appropriate"
placeholder: "Describe what are you asking for..."
- type: textarea
id: error-msg
validations:
required: false
attributes:
label: "📝 Error Message(s) or Log"
- type: input
id: uptime-kuma-version
attributes:
label: "🐻 Uptime-Kuma Version"
description: "Which version of Uptime-Kuma are you running? Please do NOT provide the docker tag such as latest or 1"
placeholder: "Ex. 1.10.0"
validations:
required: true
- type: input
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x86"
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true
- type: textarea
id: deployment-info
attributes:
label: "🖥️ Deployment Environment"
description: |
examples:
- **Runtime**: Docker 20.10.9 / nodejs 14.18.0 / K8S via ... v1.3.3 / ..
- **Database**: sqlite/embedded mariadb/external mariadb
- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
- **number of monitors**: 42
value: |
- Runtime:
- Database:
- Filesystem used to store the database on:
- number of monitors:
validations:
required: true

View File

@ -1,123 +0,0 @@
---
name: ❓ Ask for help
description: |
Submit any question related to Uptime Kuma
#title: "[Help]"
labels: ["help"]
body:
- type: markdown
attributes:
value: |
🚫 **We kindly ask you to refrain from pinging maintainers unless absolutely necessary. Pings are reserved for critical/urgent issues that require immediate attention.**
- type: checkboxes
id: no-duplicate-question
attributes:
label: ⚠️ Please verify that your question has not already been reported
description: |
To avoid duplicate reports, please search for any existing issues before submitting a new one.
You can find the list of existing issues **[HERE](https://github.com/louislam/uptime-kuma/issues?q=is%3Aissue%20sort%3Acreated-desc%20)**.
options:
- label: |
I have searched the [existing issues](https://github.com/louislam/uptime-kuma/issues?q=is%3Aissue%20sort%3Acreated-desc%20) and found no similar reports.
required: true
- type: checkboxes
id: security-policy
attributes:
label: 🛡️ Security Policy
description: |
Please review and acknowledge the Security Policy before reporting any security-related issues or bugs.
You can find the full Security Policy **[HERE](https://github.com/louislam/uptime-kuma/security/policy)**.
options:
- label: |
I have read and agree to Uptime Kuma's [Security Policy](https://github.com/louislam/uptime-kuma/security/policy).
required: true
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: 📝 Describe your problem
description: |
Please walk us through it step by step.
Include all important details and add screenshots where appropriate.
placeholder: |
Describe what are you asking for ...
- type: textarea
id: error-msg
attributes:
label: 📝 Error Message(s) or Log
description: |
Please copy and paste any relevant log output.
This will be automatically formatted into code, so no need for backticks.
render: bash session
validations:
required: false
- type: input
id: uptime-kuma-version
attributes:
label: 🐻 Uptime-Kuma Version
description: |
What version of Uptime-Kuma are you running?
Please do not provide Docker tags like `latest` or `1`.
placeholder: |
e.g., 1.23.16 or 2.0.0-beta.2
validations:
required: true
- type: input
id: operating-system
attributes:
label: 💻 Operating System and Arch
description: |
Which OS is your server/device running on? (For Replit, please do not report this bug)
placeholder: |
e.g., Ubuntu Server 24.04.2 LTS (GNU/Linux 6.8.0-55-generic x86_64)
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: 🌐 Browser
description: |
Which browser are you running on? (For Replit, please do not report this bug)
placeholder: |
e.g., Google Chrome 134.0.6998.183 (Official Build) (64-bit)
validations:
required: true
- type: textarea
id: deployment-info
attributes:
label: 🖥️ Deployment Environment
description: |
Provide details about the deployment environment, including runtime components, databases, and storage configurations. This will
help assess the infrastructure and identify any potential compatibility requirements.
**Remove any fields that do not apply to your setup.**
value: |
- **Runtime Environment**:
- Docker: Version `X.X.X` (Build `Y.Y.Y`)
- Docker Compose: Version `X.X.X`
- Portainer (BE/CE): Version `X.X.X` (LTS: Yes/No)
- MariaDB: Version `X.X.X` (LTS: Yes/No)
- Node.js: Version `X.X.X` (LTS: Yes/No)
- Kubernetes (K3S/K8S): Version `X.X.X` (LTS: Yes/No, via `[method/tool]`)
- **Database**:
- SQLite: Embedded
- MariaDB: Embedded/External
- **Database Storage**:
- **Filesystem**:
- Linux: ext4/XFS/Btrfs/ZFS/F2FS
- macOS: APFS/ HFS+
- Windows: NTFS/ReFS
- **Storage Medium**: HDD/eMMC/SSD/NVMe
- **Uptime Kuma Setup**:
- Number of monitors: `X`
validations:
required: true

100
.github/ISSUE_TEMPLATE/bug_report.yaml vendored Normal file
View File

@ -0,0 +1,100 @@
name: "🐛 Bug Report"
description: "Submit a bug report to help us improve"
#title: "[Bug] "
labels: [bug]
body:
- type: textarea
id: related-issues
validations:
required: true
attributes:
label: "📑 I have found these related issues/pull requests"
description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them is, or explain that you were unable to find any related issues"
placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
- type: checkboxes
attributes:
label: "🛡️ Security Policy"
description: Please review the security policy before reporting security related issues/bugs.
options:
- label: I agree to have read this project [Security Policy](https://github.com/louislam/uptime-kuma/security/policy)
required: true
- type: textarea
id: description
validations:
required: false
attributes:
label: "Description"
description: "You could also upload screenshots"
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "👟 Reproduction steps"
description: "How do you trigger this bug? Please walk us through it step by step. Include all important details and add screenshots where appropriate"
placeholder: "..."
- type: textarea
id: expected-behavior
validations:
required: true
attributes:
label: "👀 Expected behavior"
description: "What did you think would happen?"
placeholder: "..."
- type: textarea
id: actual-behavior
validations:
required: true
attributes:
label: "😓 Actual Behavior"
description: "What actually happened?"
placeholder: "..."
- type: input
id: uptime-kuma-version
attributes:
label: "🐻 Uptime-Kuma Version"
description: "Which version of Uptime-Kuma are you running? Please do NOT provide the docker tag such as latest or 1"
placeholder: "Ex. 1.10.0"
validations:
required: true
- type: input
id: operating-system
attributes:
label: "💻 Operating System and Arch"
description: "Which OS is your server/device running on? (For Replit, please do not report this bug)"
placeholder: "Ex. Ubuntu 20.04 x64 "
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: "🌐 Browser"
description: "Which browser are you running on?"
placeholder: "Ex. Google Chrome 95.0.4638.69"
validations:
required: true
- type: textarea
id: deployment-info
attributes:
label: "🖥️ Deployment Environment"
description: |
examples:
- **Runtime**: Docker 20.10.9 / nodejs 18.17.1 / K8S via ... v1.3.3 / ..
- **Database**: sqlite/embedded mariadb/external mariadb
- **Filesystem used to store the database on**: Windows/ZFS/btrfs/NFSv3 on a SSD/HDD/eMMC
- **number of monitors**: 42
value: |
- Runtime:
- Database:
- Filesystem used to store the database on:
- number of monitors:
validations:
required: true
- type: textarea
id: logs
attributes:
label: "📝 Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell
validations:
required: false

View File

@ -1,152 +0,0 @@
---
name: 🐛 Bug Report
description: |
Submit a bug report to help us improve
#title: "[Bug]"
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
🚫 **We kindly ask you to refrain from pinging maintainers unless absolutely necessary. Pings are reserved for critical/urgent issues that require immediate attention.**
- type: textarea
id: related-issues
validations:
required: true
attributes:
label: 📑 I have found these related issues/pull requests
description: |
Please search for related **[ISSUES](https://github.com/louislam/uptime-kuma/issues?q=is%3Aissue%20sort%3Acreated-desc)**
and **[PULL REQUESTS](https://github.com/louislam/uptime-kuma/pulls?q=is%3Apr+sort%3Acreated-desc+)**.
Explain the differences between them or clarify if you were unable to find any related issues/pull requests.
placeholder: |
Example: This relates to issue #1, which also affects the ... system. It should not be merged because ...
- type: checkboxes
id: security-policy
attributes:
label: 🛡️ Security Policy
description: |
Please review and acknowledge the Security Policy before reporting any security-related issues or bugs. You can find the full Security Policy **[HERE](https://github.com/louislam/uptime-kuma/security/policy)**.
options:
- label: |
I have read and agree to Uptime Kuma's [Security Policy](https://github.com/louislam/uptime-kuma/security/policy).
required: true
- type: textarea
id: description
validations:
required: false
attributes:
label: 📝 Description
description: |
You could also upload screenshots
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: 👟 Reproduction steps
description: |
How do you trigger this bug? Please walk us through it step by step. Include all important details and add screenshots where appropriate
placeholder: |
...
- type: textarea
id: expected-behavior
validations:
required: true
attributes:
label: 👀 Expected behavior
description: |
What did you think would happen?
placeholder: |
...
- type: textarea
id: actual-behavior
validations:
required: true
attributes:
label: 😓 Actual Behavior
description: |
What actually happened?
placeholder: |
...
- type: input
id: uptime-kuma-version
attributes:
label: 🐻 Uptime-Kuma Version
description: |
What version of Uptime-Kuma are you running? Please do not provide Docker tags like `latest` or `1`.
placeholder: |
e.g., 1.23.16 or 2.0.0-beta.2
validations:
required: true
- type: input
id: operating-system
attributes:
label: 💻 Operating System and Arch
description: |
Which OS is your server/device running on? (For Replit, please do not
report this bug)
placeholder: |
e.g., Ubuntu Server 24.04.2 LTS (GNU/Linux 6.8.0-55-generic x86_64)
validations:
required: true
- type: input
id: browser-vendor
attributes:
label: 🌐 Browser
description: |
Which browser are you running on?
placeholder: |
e.g., Google Chrome 134.0.6998.183 (Official Build) (64-bit)
validations:
required: true
- type: textarea
id: deployment-info
attributes:
label: 🖥️ Deployment Environment
description: |
Provide details about the deployment environment, including runtime components, databases, and storage configurations. This will
help assess the infrastructure and identify any potential compatibility requirements.
**Remove any fields that do not apply to your setup.**
value: |
- **Runtime Environment**:
- Docker: Version `X.X.X` (Build `Y.Y.Y`)
- Docker Compose: Version `X.X.X`
- Portainer (BE/CE): Version `X.X.X` (LTS: Yes/No)
- MariaDB: Version `X.X.X` (LTS: Yes/No)
- Node.js: Version `X.X.X` (LTS: Yes/No)
- Kubernetes (K3S/K8S): Version `X.X.X` (LTS: Yes/No, via `[method/tool]`)
- **Database**:
- SQLite: Embedded
- MariaDB: Embedded/External
- **Database Storage**:
- **Filesystem**:
- Linux: ext4/XFS/Btrfs/ZFS/F2FS
- macOS: APFS/ HFS+
- Windows: NTFS/ReFS
- **Storage Medium**: HDD/eMMC/SSD/NVMe
- **Uptime Kuma Setup**:
- Number of monitors: `X`
validations:
required: true
- type: textarea
id: logs
attributes:
label: 📝 Relevant log output
description: |
Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: bash session
validations:
required: false

View File

@ -1,2 +0,0 @@
---
blank_issues_enabled: false

View File

@ -0,0 +1,66 @@
name: 🚀 Feature Request
description: "Submit a proposal for a new feature"
#title: "[Feature] "
labels: [feature-request]
body:
- type: textarea
id: related-issues
validations:
required: true
attributes:
label: "📑 I have found these related issues/pull requests"
description: "Search related issues by clicking [HERE](https://github.com/louislam/uptime-kuma/issues?q=) and explain what the difference between them is, or explain that you were unable to find any related issues"
placeholder: "Related to #1 by also touching the ... system. They should not be merged because ..."
- type: dropdown
id: feature-area
attributes:
label: "🏷️ Feature Request Type"
description: "What kind of feature request is this?"
multiple: true
options:
- API / automation options
- New notification-provider
- Change to existing notification-provider
- New monitor
- Change to existing monitor
- Dashboard
- Status-page
- Maintenance
- Deployment
- Certificate expiry
- Settings
- Other
validations:
required: true
- type: textarea
id: feature-description
validations:
required: true
attributes:
label: "🔖 Feature description"
description: "A clear and concise description of what the feature request is."
placeholder: "You should add ..."
- type: textarea
id: solution
validations:
required: true
attributes:
label: "✔️ Solution"
description: "A clear and concise description of what you want to happen."
placeholder: "In my use-case, ..."
- type: textarea
id: alternatives
validations:
required: false
attributes:
label: "❓ Alternatives"
description: "A clear and concise description of any alternative solutions or features you've considered."
placeholder: "I have considered ..."
- type: textarea
id: additional-context
validations:
required: false
attributes:
label: "📝 Additional Context"
description: "Add any other context or screenshots about the feature request here."
placeholder: "..."

View File

@ -1,70 +0,0 @@
---
name: 🚀 Feature Request
description: |
Submit a proposal for a new feature
# title: "[Feature]"
labels: ["feature-request"]
body:
- type: markdown
attributes:
value: |
### 🚫 Please Avoid Unnecessary Pinging of Maintainers
We kindly ask you to refrain from pinging maintainers unless absolutely necessary.
Pings are for critical/urgent pull requests that require immediate attention.
- type: textarea
id: related-issues
validations:
required: true
attributes:
label: 📑 I have found these related issues/pull requests
description: |
Please search for related **[ISSUES](https://github.com/louislam/uptime-kuma/issues?q=is%3Aissue%20sort%3Acreated-desc)**
and **[PULL REQUESTS](https://github.com/louislam/uptime-kuma/pulls?q=is%3Apr+sort%3Acreated-desc+)**.
Explain the differences between them or clarify if you were unable to find any related issues/pull requests.
placeholder: |
Example: This relates to issue #1, which also affects the ... system. It should not be merged because ...
- type: textarea
id: feature-description
validations:
required: true
attributes:
label: 🔖 Feature description
description: |
A clear and concise description of what the feature request is.
placeholder: |
You should add ...
- type: textarea
id: solution
validations:
required: true
attributes:
label: ✔️ Solution
description: |
A clear and concise description of what you want to happen.
placeholder: |
In my use-case, ...
- type: textarea
id: alternatives
validations:
required: false
attributes:
label: ❓ Alternatives
description: |
A clear and concise description of any alternative solutions or features you've considered.
placeholder: |
I have considered ...
- type: textarea
id: additional-context
validations:
required: false
attributes:
label: 📝 Additional Context
description: |
Add any other context or screenshots about the feature request here.
placeholder: |
...

17
.github/ISSUE_TEMPLATE/security.md vendored Normal file
View File

@ -0,0 +1,17 @@
---
name: "Security Issue"
about: "Just for alerting @louislam, do not provide any details here"
title: "Security Issue"
ref: "main"
labels:
- security
---
DO NOT PROVIDE ANY DETAILS HERE. Please privately report to https://github.com/louislam/uptime-kuma/security/advisories/new.
Why is this issue needed? Because GitHub Advisories do not send a notification to @louislam, this issue serves as a workaround to do so.
Your GitHub Advisory URL:

View File

@ -1,54 +0,0 @@
---
name: 🛡️ Security Issue
description: |
Notify Louis Lam about a security concern. Please do NOT include any sensitive details in this issue.
# title: "Security Issue"
labels: ["security"]
assignees: [louislam]
body:
- type: markdown
attributes:
value: |
## ❗ IMPORTANT: DO NOT SHARE VULNERABILITY DETAILS HERE
## Please do not open issues for upstream dependency scan results.
Automated security tools often report false-positive issues that are not exploitable in the context of Uptime Kuma.
Reviewing these without concrete impact does not scale for us.
If you can demonstrate that an upstream issue is actually exploitable in Uptime Kuma (e.g. with a PoC or reproducible steps), we're happy to take a look.
### ⚠️ Report a Security Vulnerability
**If you have discovered a security vulnerability, please report it securely using the GitHub Security Advisory.**
**Note**: This issue is only for notifying the maintainers of the repository, as the GitHub Security Advisory does not automatically send notifications.
- **Confidentiality**: The information you provide in the GitHub Security Advisory will initially remain confidential. However, once the vulnerability is addressed, the advisory will be publicly disclosed on GitHub.
- **Access and Visibility**: Until the advisory is published, it will only be visible to the maintainers of the repository and invited collaborators.
- **Credit**: You will be automatically credited as a contributor for identifying and reporting the vulnerability. Your contribution will be reflected in the MITRE Credit System.
- **Important Reminder**: **Do not include any sensitive or detailed vulnerability information in this issue.** This issue is only for sharing the advisory URL to notify the maintainers of the repository, not for discussing the vulnerability itself.
**Thank you for helping us keep Uptime Kuma secure!**
## **Step 1: Submit a GitHub Security Advisory**
Right-click the link below and select `Open link in new tab` to access the page.
This will keep the security issue open, allowing you to easily return and paste the Advisory URL here later.
➡️ [Create a New Security Advisory](https://github.com/louislam/uptime-kuma/security/advisories/new)
## **Step 2: Share the Advisory URL**
Once you've created your advisory, please share the URL below.
This will notify Louis Lam and enable them to take the appropriate action.
- type: textarea
id: github-advisory-url
validations:
required: true
attributes:
label: GitHub Advisory URL for @louislam
placeholder: |
Please paste the GitHub Advisory URL here. Only the URL is required.
Example: https://github.com/louislam/uptime-kuma/security/advisories/GHSA-8h5r-7t6l-q3kz

View File

@ -1,45 +1,33 @@
# Summary ⚠️⚠️⚠️ Since we do not accept all types of pull requests and do not want to waste your time. Please be sure that you have read pull request rules:
https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma
In this pull request, the following changes are made: Tick the checkbox if you understand [x]:
- [ ] I have read and understand the pull request rules.
- Foobar was changed to FooFoo, because ... # Description
<!--Please link any GitHub issues or tasks that this pull request addresses--> Fixes #(issue)
- Relates to #issue-number <!--this links related the issue--> ## Type of change
- Resolves #issue-number <!--this auto-closes the issue-->
<details> Please delete any options that are not relevant.
<summary>Please follow this checklist to avoid unnecessary back and forth (click to expand)</summary>
- [ ] ⚠️ If there are Breaking change (a fix or feature that alters existing functionality in a way that could cause issues) I have called them out - Bug fix (non-breaking change which fixes an issue)
- [ ] 🧠 I have disclosed any use of LLMs/AI in this contribution and reviewed all generated content. - User interface (UI)
I understand that I am responsible for and able to explain every line of code I submit. - New feature (non-breaking change which adds functionality)
- [ ] 🔍 Any UI changes adhere to visual style of this project. - Breaking change (a fix or feature that would cause existing functionality to not work as expected)
- [ ] 🛠️ I have self-reviewed and self-tested my code to ensure it works as expected. - Other
- [ ] 📝 I have commented my code, especially in hard-to-understand areas (e.g., using JSDoc for methods). - This change requires a documentation update
- [ ] 🤖 I added or updated automated tests where appropriate.
- [ ] 📄 Documentation updates are included (if applicable).
- [ ] 🧰 Dependency updates are listed and explained.
- [ ] ⚠️ CI passes and is green.
</details> ## Checklist
## Screenshots for Visual Changes - [ ] My code follows the style guidelines of this project
- [ ] I ran ESLint and other linters for modified files
- [ ] I have performed a self-review of my own code and tested it
- [ ] I have commented my code, particularly in hard-to-understand areas (including JSDoc for methods)
- [ ] My changes generate no new warnings
- [ ] My code needed automated testing. I have added them (this is optional task)
<!-- ## Screenshots (if any)
If this pull request introduces visual changes, please provide the following details.
If not, remove this section.
Please upload the image directly here by pasting it or dragging and dropping. Please do not use any external image service. Instead, just paste in or drag and drop the image here, and it will be uploaded automatically.
-->
- **UI Modifications**: Highlight any changes made to the user interface.
- **Before & After**: Include screenshots or comparisons (if applicable).
| Event | Before | After |
| ------------------ | --------------------- | -------------------- |
| `UP` | ![Before](image-link) | ![After](image-link) |
| `DOWN` | ![Before](image-link) | ![After](image-link) |
| Certificate-expiry | ![Before](image-link) | ![After](image-link) |
| Testing | ![Before](image-link) | ![After](image-link) |

View File

@ -1,224 +0,0 @@
# Uptime Kuma Review Guidelines
> [!NOTE]
> These review guidelines are a work in progress, and are frequently
> updated and improved, so please check back frequently for the latest version.
## Preparing for a PR Review
### Read the PR description carefully
Make sure you understand what the PR is trying to solve or implement. This could
be a bug fix, a new feature, or a refactor.
### Check the linked issues
If the PR has a linked issue, read it to better understand the context and the
reason for the change.
### Check the test coverage
Make sure relevant tests have been added or modified. If the PR adds new
functionality, there should be tests covering the change.
## General Review
### Code formatting and style
Check if the code adheres to the style guidelines of the project. Make sure
there are no unused imports, variables, `console.log` for debugging in the PR.
- [Project Style](../CONTRIBUTING.md#project-styles)
- [Coding Style](../CONTRIBUTING.md#coding-styles)
### Readability and maintainability
Is the code easy to understand for other developers? Make sure complex parts are
explained with comments about **_why_** something is done, and use clear names
to show **_how_**. Are variables and functions well-named, and is there a
consistent naming style? Also, check if the code is maintainable:
- Is it unnecessarily complex? Could it be simplified?
- Does it follow the **[Single Responsibility Principle (SRP)]**?
[Single Responsibility Principle (SRP)]: https://www.geeksforgeeks.org/single-responsibility-in-solid-design-principle/
### Documentation
Is the PR well documented? Check if the descriptions of functions, parameters,
and return values are present. Are there any changes needed to the README or
other documentation, for example, if new features or configurations are
introduced?
## Functional Review
### Testing
Ensure that the new code is properly tested. This includes unit tests,
integration tests, and if necessary, end-to-end tests.
### Test results
Did all tests pass in the CI pipeline (e.g., GitHub Actions, Travis, CircleCI)?
### Testing in different environments
If the changes depend on certain environments or configurations, verify that the
code has been tested in various environments (e.g., local development, staging,
production).
- [How to test Pull Requests](https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests)
### Edge cases and regressions
- Are there test cases for possible edge cases?
- Could this change introduce regressions in other parts of the system?
## Security
### Security issues
Check for potential security problems, such as SQL injection, XSS attacks, or
unsafe API calls. Are there passwords, tokens, or other sensitive data left in
the code by mistake?
### Authentication and authorization
Is access to sensitive data or functionality properly secured? Check that the
correct authorization and authentication mechanisms are in place.
### Security Best Practices
- Ensure that the code is free from common vulnerabilities like **SQL
injection**, **XSS attacks**, and **insecure API calls**.
- Check for proper encryption of sensitive data, and ensure that **passwords**
or **API tokens** are not hardcoded in the code.
## Performance
### Performance impact
Check if the changes negatively impact performance. This can include factors
like load times, memory usage, or other performance aspects.
### Use of external libraries
- Have the right libraries been chosen?
- Are there unnecessary dependencies that might reduce performance or increase
code complexity?
- Are these dependencies actively maintained and free of known vulnerabilities?
### Performance Best Practices
- **Measure performance** using tools like Lighthouse or profiling libraries.
- **Avoid unnecessary dependencies** that may bloat the codebase.
- Ensure that the **code does not degrade the user experience** (e.g., by
increasing load times or memory consumption).
## Compliance and Integration
### Alignment with the project
Are the changes consistent with the project goals and requirements? Ensure the
PR aligns with the architecture and design principles of the project.
### Integration
If the PR depends on other PRs or changes, verify that they integrate well with
the rest of the project. Ensure the code does not cause conflicts with other
active PRs.
### Backward compatibility
Does the change break compatibility with older versions of the software or
dependencies? If so, is there a migration plan in place?
## Logging and Error Handling
### Proper error handling
- Are errors properly caught and handled instead of being silently ignored?
- Are exceptions used appropriately?
### Logging
- Is sufficient logging included for debugging and monitoring?
- Is there excessive logging that could affect performance?
## Accessibility (for UI-related changes)
If the PR affects the user interface, ensure that it meets accessibility
standards:
- Can users navigate using only the keyboard?
- Are screen readers supported?
- Is there proper color contrast for readability?
- Are there **WCAG** (Web Content Accessibility Guidelines) compliance issues?
- Use tools like **Axe** or **Lighthouse** to evaluate accessibility.
## Providing Feedback
### Constructive feedback
Provide clear, constructive feedback on what is good and what can be improved.
If improvements are needed, be specific about what should change.
### Clarity and collaboration
Ensure your feedback is friendly and open, so the team member who submitted the
PR feels supported and motivated to make improvements.
<details><summary><b>For Maintainers only</b> (click to expand)</summary>
<p>
## Go/No-Go Decision
### Go
If the code has no issues and meets the project requirements, approve it (and
possibly merge it).
### No-Go
If there are significant issues, such as missing tests, security
vulnerabilities, or performance problems, request the necessary changes before
the PR can be approved. Some examples of **significant issues** include:
- Missing tests for new functionality.
- Identified **security vulnerabilities**.
- Code changes that break **backward compatibility** without a proper migration
plan.
- Code that causes **major performance regressions** (e.g., high CPU/memory
usage).
## After the Review
### Reordering and merging
Once the necessary changes have been made and the PR is approved, the code can
be merged into the main branch (e.g., `main` or `master`).
### Testing after merging
Ensure that the build passes after merging the PR, and re-test the functionality
in the production environment if necessary.
## Follow-up
### Communication with team members
If the PR has long-term technical or functional implications, communicate the
changes to the team.
### Monitoring
Continue monitoring the production environment for any unexpected issues that
may arise after the code has been merged.
</p>
</details>
---
This process ensures that PRs are systematically and thoroughly reviewed,
improving overall code quality.

View File

@ -1,176 +0,0 @@
# Copilot Instructions for Uptime Kuma
## Copilot's Goals/Tasks
- Check spelling
- Do not show "Pull Request Overview"
- You do not have to reply if there are no issues
## Repository Overview
**Uptime Kuma** is a self-hosted monitoring tool for HTTP(s), TCP, DNS, Docker, etc. Built with Vue 3 (frontend) and Node.js/Express (backend), using Socket.IO for real-time communication.
- **Languages**: JavaScript, Vue 3, TypeScript (limited), HTML, CSS/SCSS
- **Backend**: Node.js >= 20.4, Express.js, Socket.IO, SQLite
- **Frontend**: Vue 3, Vite, Bootstrap 5, Chart.js
- **Package Manager**: npm with `legacy-peer-deps=true` (.npmrc)
## Build & Validation Commands
### Prerequisites
- Node.js >= 20.4.0, npm >= 9.3, Git
### Essential Command Sequence
1. **Install Dependencies**:
```bash
npm ci # Use npm ci NOT npm install (~60-90 seconds)
```
2. **Linting** (required before committing):
```bash
npm run lint # Both linters (~15-30 seconds)
npm run lint:prod # For production (zero warnings)
```
3. **Build Frontend**:
```bash
npm run build # Takes ~90-120 seconds, builds to dist/
```
4. **Run Tests**:
```bash
npm run test-backend # Backend tests (~50-60 seconds)
npm test # All tests
```
### Development Workflow
```bash
npm run dev # Starts frontend (port 3000) and backend (port 3001)
```
## Project Architecture
### Directory Structure
```
/
├── server/ Backend source code
│ ├── model/ Database models (auto-mapped to tables)
│ ├── monitor-types/ Monitor type implementations
│ ├── notification-providers/ Notification integrations
│ ├── routers/ Express routers
│ ├── socket-handlers/ Socket.IO event handlers
│ ├── server.js Server entry point
│ └── uptime-kuma-server.js Main server logic
├── src/ Frontend source code (Vue 3 SPA)
│ ├── components/ Vue components
│ ├── pages/ Page components
│ ├── lang/ i18n translations
│ ├── router.js Vue Router configuration
│ └── main.js Frontend entry point
├── db/ Database related
│ ├── knex_migrations/ Knex migration files
│ └── kuma.db SQLite database (gitignored)
├── test/ Test files
│ ├── backend-test/ Backend unit tests
│ └── e2e/ Playwright E2E tests
├── config/ Build configuration
│ ├── vite.config.js Vite build config
│ └── playwright.config.js Playwright test config
├── dist/ Frontend build output (gitignored)
├── data/ App data directory (gitignored)
├── public/ Static frontend assets (dev only)
├── docker/ Docker build files
└── extra/ Utility scripts
```
### Key Configuration Files
- **package.json**: Scripts, dependencies, Node.js version requirement
- **.eslintrc.js**: ESLint rules (4 spaces, double quotes, unix line endings, JSDoc required)
- **.stylelintrc**: Stylelint rules (4 spaces indentation)
- **.editorconfig**: Editor settings (4 spaces, LF, UTF-8)
- **tsconfig-backend.json**: TypeScript config for backend (only src/util.ts)
- **.npmrc**: `legacy-peer-deps=true` (required for dependency resolution)
- **.gitignore**: Excludes node_modules, dist, data, tmp, private
### Code Style (strictly enforced by linters)
- 4 spaces indentation, double quotes, Unix line endings (LF), semicolons required
- **Naming**: JavaScript/TypeScript (camelCase), SQLite (snake_case), CSS/SCSS (kebab-case)
- JSDoc required for all functions/methods
## CI/CD Workflows
**auto-test.yml** (runs on PR/push to master/1.23.X):
- Linting, building, backend tests on multiple OS/Node versions (15 min timeout)
- E2E Playwright tests
**validate.yml**: Validates JSON/YAML files, language files, knex migrations
**PR Requirements**: All linters pass, tests pass, code follows style guidelines
## Common Issues
1. **npm install vs npm ci**: Always use `npm ci` for reproducible builds
2. **TypeScript errors**: `npm run tsc` shows 1400+ errors - ignore them, they don't affect builds
3. **Stylelint warnings**: Deprecation warnings are expected, ignore them
4. **Test failures**: Always run `npm run build` before running tests
5. **Port conflicts**: Dev server uses ports 3000 and 3001
6. **First run**: Server shows "db-config.json not found" - this is expected, starts setup wizard
## Translations
- Managed via Weblate. Add keys to `src/lang/en.json` only
- Don't include other languages in PRs
- Use `$t("key")` in Vue templates
## Database
- Primary: SQLite (also supports MariaDB/MySQL)
- Migrations in `db/knex_migrations/` using Knex.js
- Filename format validated by CI: `node ./extra/check-knex-filenames.mjs`
## Testing
- **Backend**: Node.js test runner, fast unit tests
- **E2E**: Playwright (requires `npx playwright install` first time)
- Test data in `data/playwright-test`
## Adding New Features
### New Notification Provider
Files to modify:
1. `server/notification-providers/PROVIDER_NAME.js` (backend logic)
2. `server/notification.js` (register provider)
3. `src/components/notifications/PROVIDER_NAME.vue` (frontend UI)
4. `src/components/notifications/index.js` (register frontend)
5. `src/components/NotificationDialog.vue` (add to list)
6. `src/lang/en.json` (add translation keys)
### New Monitor Type
Files to modify:
1. `server/monitor-types/MONITORING_TYPE.js` (backend logic)
2. `server/uptime-kuma-server.js` (register monitor type)
3. `src/pages/EditMonitor.vue` (frontend UI)
4. `src/lang/en.json` (add translation keys)
## Important Notes
1. **Trust these instructions** - based on testing. Search only if incomplete/incorrect
2. **Dependencies**: 5 known vulnerabilities (3 moderate, 2 high) - acknowledged, don't fix without discussion
3. **Git Branches**: `master` (v2 development), `1.23.X` (v1 maintenance)
4. **Node Version**: >= 20.4.0 required
5. **Socket.IO**: Most backend logic in `server/socket-handlers/`, not REST
6. **Never commit**: `data/`, `dist/`, `tmp/`, `private/`, `node_modules/`

View File

@ -1,22 +0,0 @@
# Dependabot configuration for Uptime Kuma
# See: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
# Enable version updates for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
# Group all GitHub Actions updates into a single PR
groups:
github-actions:
patterns:
- "*"
open-pull-requests-limit: 5
commit-message:
prefix: "chore"
include: "scope"
cooldown:
default-days: 7

View File

@ -1,144 +1,93 @@
name: Auto Test # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
concurrency: name: Auto Test
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-server
cancel-in-progress: true
on: on:
push: push:
branches: [master, 1.23.X, 3.0.0] branches: [ master, 1.23.X ]
paths-ignore:
- '*.md'
pull_request: pull_request:
permissions: {} branches: [ master, 1.23.X ]
paths-ignore:
- '*.md'
jobs: jobs:
auto-test: auto-test:
needs: [ check-linters ]
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
permissions: timeout-minutes: 15
contents: read
strategy: strategy:
fail-fast: false
matrix: matrix:
os: [macos-latest, ubuntu-22.04, windows-latest, ubuntu-22.04-arm] os: [macos-latest, ubuntu-latest, windows-latest, ARM64]
node: [ 18, 20 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/ # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
node: [20, 24]
# Also test non-LTS, but only on Ubuntu.
include:
- os: ubuntu-22.04
node: 25
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - uses: actions/checkout@v4
with: { persist-credentials: false }
- name: Cache/Restore node_modules - name: Use Node.js ${{ matrix.node }}
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1 uses: actions/setup-node@v4
id: node-modules-cache with:
with: node-version: ${{ matrix.node }}
path: node_modules - run: npm install
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }} - run: npm run build
- run: npm run test-backend
- name: Use Node.js ${{ matrix.node }} env:
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 HEADLESS_TEST: 1
with: JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
node-version: ${{ matrix.node }}
- run: npm clean-install --no-fund
- name: Rebuild native modules for ARM64
if: matrix.os == 'ubuntu-22.04-arm'
run: npm rebuild @louislam/sqlite3
- run: npm run build
- run: npm run test-backend
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
# As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works # As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
armv7-simple-test: armv7-simple-test:
runs-on: ubuntu-latest needs: [ ]
permissions: runs-on: ${{ matrix.os }}
contents: read timeout-minutes: 15
if: ${{ github.repository == 'louislam/uptime-kuma' }}
strategy: strategy:
fail-fast: false
matrix: matrix:
node: [20, 22] os: [ ARMv7 ]
node: [ 18, 20 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/ # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps: steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - run: git config --global core.autocrlf false # Mainly for Windows
with: { persist-credentials: false } - uses: actions/checkout@v4
- name: Set up QEMU - name: Use Node.js ${{ matrix.node }}
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 uses: actions/setup-node@v4
with: with:
platforms: linux/arm/v7 node-version: ${{ matrix.node }}
- run: npm ci --production
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Test on ARMv7 using Docker with QEMU
run: |
docker run --rm --platform linux/arm/v7 \
-v $PWD:/workspace \
-w /workspace \
arm32v7/node:${{ matrix.node }} \
npm clean-install --no-fund --production
check-linters: check-linters:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - uses: actions/checkout@v4
with: { persist-credentials: false }
- name: Cache/Restore node_modules - name: Use Node.js 20
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1 uses: actions/setup-node@v4
id: node-modules-cache with:
with: node-version: 20
path: node_modules - run: npm install
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }} - run: npm run lint:prod
- name: Use Node.js 20
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- run: npm clean-install --no-fund
- run: npm run lint:prod
e2e-test: e2e-test:
runs-on: ubuntu-22.04-arm needs: [ ]
permissions: runs-on: ARM64
contents: read
env:
PLAYWRIGHT_VERSION: ~1.39.0
steps: steps:
- run: git config --global core.autocrlf false # Mainly for Windows - run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - uses: actions/checkout@v4
with: { persist-credentials: false }
- name: Cache/Restore node_modules - name: Use Node.js 20
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1 uses: actions/setup-node@v4
id: node-modules-cache with:
with: node-version: 20
path: node_modules - run: npm install
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }} - run: npx playwright install
- run: npm run build
- name: Setup Node.js - run: npm run test-e2e
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
- run: npm clean-install --no-fund
- name: Rebuild native modules for ARM64
run: npm rebuild @louislam/sqlite3
- name: Install Playwright ${{ env.PLAYWRIGHT_VERSION }}
run: npx playwright@${{ env.PLAYWRIGHT_VERSION }} install
- run: npm run build
- run: npm run test-e2e

View File

@ -1,53 +0,0 @@
name: autofix.ci
on:
push:
branches: ["master", "1.23.X"]
pull_request:
permissions: {}
jobs:
autofix:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}
- name: Setup Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- name: Install dependencies
run: npm ci
- name: Update RDAP DNS data from IANA
run: wget -O server/model/rdap-dns.json https://data.iana.org/rdap/dns.json
continue-on-error: true
- name: Auto-fix JavaScript/Vue linting issues
run: npm run lint-fix:js
continue-on-error: true
- name: Auto-fix CSS/SCSS linting issues
run: npm run lint-fix:style
continue-on-error: true
- name: Auto-format code with Prettier
run: npm run fmt
continue-on-error: true
- name: Compile TypeScript
run: npm run tsc
continue-on-error: true
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

View File

@ -1,93 +0,0 @@
name: Beta Release
on:
workflow_dispatch:
inputs:
version:
description: "Beta version number (e.g., 2.1.0-beta.2)"
required: true
type: string
previous_version:
description: "Previous version tag for changelog (e.g., 2.1.0-beta.1)"
required: true
type: string
dry_run:
description: "Dry Run (The docker image will not be pushed to registries. PR will still be created.)"
required: false
type: boolean
default: false
permissions:
contents: write
pull-requests: write
jobs:
beta-release:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: master
persist-credentials: true
fetch-depth: 0 # Fetch all history for changelog generation
- name: Set up Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 24
- name: Create release branch
env:
VERSION: ${{ inputs.version }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@github.com/${{ github.repository }}.git"
# Delete remote branch if it exists
git push origin --delete "release-${VERSION}" || true
# Delete local branch if it exists
git branch -D "release-${VERSION}" || true
# For testing purpose
# git checkout beta-workflow
git checkout -b "release-${VERSION}"
- name: Install dependencies
run: npm clean-install --no-fund
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Run release-beta
env:
RELEASE_BETA_VERSION: ${{ inputs.version }}
RELEASE_PREVIOUS_VERSION: ${{ inputs.previous_version }}
DRY_RUN: ${{ inputs.dry_run }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: npm run release-beta
- name: Upload dist.tar.gz as artifact
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
with:
name: dist-${{ inputs.version }}
path: ./tmp/dist.tar.gz
retention-days: 90

View File

@ -1,48 +0,0 @@
name: Build Docker Base Images
on:
workflow_dispatch: # Allow manual trigger
permissions: {}
jobs:
build-docker-base:
runs-on: ubuntu-latest
timeout-minutes: 120
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Use Node.js 20
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- name: Build and push base2-slim image
run: npm run build-docker-base-slim
- name: Build and push base2 image
run: npm run build-docker-base

View File

@ -3,29 +3,23 @@ name: Close Incorrect Issue
on: on:
issues: issues:
types: [opened] types: [opened]
permissions: {}
jobs: jobs:
close-incorrect-issue: close-incorrect-issue:
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
permissions:
issues: write
strategy: strategy:
matrix: matrix:
os: [ubuntu-latest] os: [ubuntu-latest]
node-version: [20] node-version: [18]
steps: steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - uses: actions/checkout@v4
with: { persist-credentials: false }
- name: Use Node.js ${{ matrix.node-version }} - name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 uses: actions/setup-node@v4
with: with:
node-version: ${{ matrix.node-version }} node-version: ${{ matrix.node-version }}
- run: npm ci cache: 'npm'
- name: Close incorrect issue - run: npm ci
run: node extra/close-incorrect-issue.js ${{ secrets.GITHUB_TOKEN }} ${{ github.event.issue.number }} "$ISSUE_USER_LOGIN" - run: node extra/close-incorrect-issue.js ${{ secrets.GITHUB_TOKEN }} ${{ github.event.issue.number }} ${{ github.event.issue.user.login }}
env:
ISSUE_USER_LOGIN: ${{ github.event.issue.user.login }}

View File

@ -2,11 +2,11 @@ name: "CodeQL"
on: on:
push: push:
branches: ["master", "1.23.X"] branches: [ "master", "1.23.X"]
pull_request: pull_request:
branches: ["master", "1.23.X"] branches: [ "master", "1.23.X"]
schedule: schedule:
- cron: "16 22 * * 0" - cron: '16 22 * * 0'
jobs: jobs:
analyze: analyze:
@ -22,34 +22,22 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
language: ["go", "javascript-typescript"] language: [ 'go', 'javascript-typescript' ]
steps: steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Checkout repository
with: { persist-credentials: false } uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning. # Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9 uses: github/codeql-action/init@v2
with: with:
languages: ${{ matrix.language }} languages: ${{ matrix.language }}
- name: Autobuild - name: Autobuild
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9 uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9 uses: github/codeql-action/analyze@v2
with: with:
category: "/language:${{matrix.language}}" category: "/language:${{matrix.language}}"
zizmor:
runs-on: ubuntu-latest
permissions:
security-events: write
contents: read
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Run zizmor
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0

View File

@ -1,30 +0,0 @@
name: Merge Conflict Labeler
# pull_request_target is safe here because:
# 1. Only uses a pinned trusted action (by SHA)
# 2. Has minimal permissions (contents: read, pull-requests: write)
# 3. Doesn't checkout or execute any untrusted code from PRs
# 4. Only adds/removes labels based on merge conflict status
on: # zizmor: ignore[dangerous-triggers]
push:
branches:
- master
pull_request_target:
branches:
- master
types: [synchronize]
jobs:
label:
name: Labeling
runs-on: ubuntu-latest
if: ${{ github.repository == 'louislam/uptime-kuma' }}
permissions:
contents: read
pull-requests: write
steps:
- name: Apply label
uses: eps1lon/actions-label-merge-conflict@1df065ebe6e3310545d4f4c4e862e43bdca146f0 # v3.0.3
with:
dirtyLabel: "needs:resolve-merge-conflict"
repoToken: "${{ secrets.GITHUB_TOKEN }}"

25
.github/workflows/conflict_labeler.yml vendored Normal file
View File

@ -0,0 +1,25 @@
name: Merge Conflict Labeler
on:
push:
branches:
- master
pull_request_target:
branches:
- master
types: [synchronize]
jobs:
label:
name: Labeling
runs-on: ubuntu-latest
if: ${{ github.repository == 'louislam/uptime-kuma' }}
permissions:
contents: read
pull-requests: write
steps:
- name: Apply label
uses: eps1lon/actions-label-merge-conflict@v3
with:
dirtyLabel: 'needs:resolve-merge-conflict'
repoToken: '${{ secrets.GITHUB_TOKEN }}'

View File

@ -0,0 +1,27 @@
name: json-yaml-validate
on:
push:
branches:
- master
pull_request:
branches:
- master
- 1.23.X
workflow_dispatch:
permissions:
contents: read
pull-requests: write # enable write permissions for pull request comments
jobs:
json-yaml-validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: json-yaml-validate
id: json-yaml-validate
uses: GrantBirki/json-yaml-validate@v2.4.0
with:
comment: "true" # enable comment mode
exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions

View File

@ -1,65 +0,0 @@
name: Mark PR as draft when changes are requested
# pull_request_target is safe here because:
# 1. Does not use any external actions; only uses the GitHub CLI via run commands
# 2. Has minimal permissions
# 3. Doesn't checkout or execute any untrusted code from PRs
# 4. Only adds/removes labels or changes the draft status
on: # zizmor: ignore[dangerous-triggers]
pull_request_target:
types:
- review_submitted
- labeled
- ready_for_review
permissions: {}
jobs:
mark-draft:
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: |
(
github.event.action == 'review_submitted' &&
github.event.review.state == 'changes_requested'
) || (
github.event.action == 'labeled' &&
github.event.label.name == 'pr:please address review comments'
)
steps:
- name: Add label on requested changes
if: github.event.review.state == 'changes_requested'
env:
GH_TOKEN: ${{ github.token }}
run: |
gh issue edit "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--add-label "pr:please address review comments"
- name: Mark PR as draft
env:
GH_TOKEN: ${{ github.token }}
run: |
gh pr ready "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--undo || true
# || true to ignore the case where the pr is already a draft
ready-for-review:
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: github.event.action == 'ready_for_review'
steps:
- name: Update labels for review
env:
GH_TOKEN: ${{ github.token }}
run: |
gh issue edit "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--remove-label "pr:please address review comments" || true
gh issue edit "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--add-label "pr:needs review"

View File

@ -1,40 +0,0 @@
name: New contributor message
on:
# Safety
# This workflow uses pull_request_target so it can run with write permissions on first-time contributor PRs.
# It is safe because it does not check out or execute any code from the pull request and
# only uses the pinned, trusted plbstl/first-contribution action
pull_request_target: # zizmor: ignore[dangerous-triggers]
types: [opened, closed]
branches:
- master
permissions:
pull-requests: write
jobs:
build:
if: github.repository == 'louislam/uptime-kuma'
name: Hello new contributor
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: plbstl/first-contribution@4b2b042fffa26792504a18e49aa9543a87bec077 # v4.1.0
with:
pr-reactions: rocket
pr-opened-msg: >
Hello and thanks for lending a paw to Uptime Kuma! 🐻👋
As this is your first contribution, please be sure to check out our [Pull Request guidelines](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma).
In particular:
- Mark your PR as Draft while you're still making changes
- Mark it as Ready for review once it's fully ready
If you have any design or process questions, feel free to ask them right here in this pull request - unclear documentation is a bug too.
pr-merged-msg: >
@{fc-author} congrats on your first contribution to Uptime Kuma! 🐻
We hope you enjoy contributing to our project and look forward to seeing more of your work in the future!
If you want to see your contribution in action, please see our [nightly builds here](https://hub.docker.com/layers/louislam/uptime-kuma/nightly2).

View File

@ -1,58 +0,0 @@
# Builds and publishes the nightly Docker images (Docker Hub + GHCR)
# every day at 02:00 UTC, or on manual dispatch.
name: Nightly Release
on:
  schedule:
    # Runs at 2:00 AM UTC every day
    - cron: "0 2 * * *"
  workflow_dispatch: # Allow manual trigger
permissions: {}
jobs:
  release-nightly:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with: { persist-credentials: false }
      # QEMU + Buildx enable multi-architecture image builds
      - name: Set up QEMU
        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
      - name: Login to Docker Hub
        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Login to GitHub Container Registry
        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
        with:
          registry: ghcr.io
          username: ${{ secrets.GHCR_USERNAME }}
          password: ${{ secrets.GHCR_TOKEN }}
      - name: Use Node.js 20
        uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
        with:
          node-version: 20
      - name: Cache/Restore node_modules
        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        id: node-modules-cache
        with:
          path: node_modules
          key: node-modules-${{ runner.os }}-node20-${{ hashFiles('**/package-lock.json') }}
      - name: Install dependencies
        run: npm clean-install --no-fund
      - name: Run release-nightly
        run: npm run release-nightly

View File

@ -1,31 +0,0 @@
name: "PR Metadata"
# if someone opens a PR, edits it, or reopens it we want to validate the title
# This is separate from the rest of the CI as the title may change without code changes
on:
  # SECURITY: pull_request_target is used here to allow validation of PRs from forks.
  # This is safe because:
  # 1. No code from the PR is checked out
  # 2. Permissions are restricted to pull-requests: read
  # 3. Only a trusted third-party action is used to validate the PR title
  # 4. No user-controlled code is executed
  pull_request_target: # zizmor: ignore[dangerous-triggers]
    types:
      - opened
      - edited
      - reopened
      - synchronize
permissions:
  pull-requests: read
jobs:
  pr-title:
    name: Validate PR title follows https://conventionalcommits.org
    runs-on: ubuntu-latest
    permissions:
      pull-requests: read
    steps:
      - uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6.1.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -2,18 +2,16 @@ name: prevent-file-change
on: on:
pull_request: pull_request:
permissions: {}
jobs: jobs:
check-file-changes: check-file-changes:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
pull-requests: read
steps: steps:
- name: Prevent file change - name: Prevent file change
uses: xalvarez/prevent-file-change-action@004d9f17c2e4a7afa037cda5f38dc55a5e9c9c06 # v1.9.1 uses: xalvarez/prevent-file-change-action@v1
with: with:
githubToken: ${{ secrets.GITHUB_TOKEN }} githubToken: ${{ secrets.GITHUB_TOKEN }}
# Regex, /src/lang/*.json is not allowed to be changed, except for /src/lang/en.json # Regex, /src/lang/*.json is not allowed to be changed, except for /src/lang/en.json
pattern: '^(?!src/lang/en\.json$)src/lang/.*\.json$' pattern: '^(?!src/lang/en\.json$)src/lang/.*\.json$'
trustedAuthors: UptimeKumaBot trustedAuthors: UptimeKumaBot

View File

@ -1,19 +1,15 @@
name: "Automatically close stale issues" name: 'Automatically close stale issues'
on: on:
workflow_dispatch: workflow_dispatch:
schedule: schedule:
- cron: "0 */6 * * *" - cron: '0 */6 * * *'
#Run every 6 hours #Run every 6 hours
permissions: {}
jobs: jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
actions: write
issues: write
steps: steps:
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1 - uses: actions/stale@v9
with: with:
stale-issue-message: |- stale-issue-message: |-
We are clearing up our old `help`-issues and your issue has been open for 60 days with no activity. We are clearing up our old `help`-issues and your issue has been open for 60 days with no activity.
@ -22,16 +18,16 @@ jobs:
days-before-close: 7 days-before-close: 7
days-before-pr-stale: -1 days-before-pr-stale: -1
days-before-pr-close: -1 days-before-pr-close: -1
exempt-issue-labels: "News,discussion,bug,doc,feature-request" exempt-issue-labels: 'News,Medium,High,discussion,bug,doc,feature-request'
exempt-issue-assignees: "louislam" exempt-issue-assignees: 'louislam'
operations-per-run: 200 operations-per-run: 200
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1 - uses: actions/stale@v9
with: with:
stale-issue-message: |- stale-issue-message: |-
This issue was marked as `cannot-reproduce` by a maintainer. This issue was marked as `cannot-reproduce` by a maintainer.
If an issue is non-reproducible, we cannot fix it, as we do not know what the underlying issue is. If an issue is non-reproducible, we cannot fix it, as we do not know what the underlying issue is.
If you have any ideas how we can reproduce this issue, we would love to hear them. If you have any ideas how we can reproduce this issue, we would love to hear them.
We don't have a good way to deal with truly unreproducible issues and are going to close this issue in a month. We don't have a good way to deal with truly unreproducible issues and are going to close this issue in a month.
If you think there might be other differences in our environment or in how we tried to reproduce this, we would appreciate any ideas. If you think there might be other differences in our environment or in how we tried to reproduce this, we would appreciate any ideas.
close-issue-message: |- close-issue-message: |-
@ -41,5 +37,6 @@ jobs:
days-before-close: 30 days-before-close: 30
days-before-pr-stale: -1 days-before-pr-stale: -1
days-before-pr-close: -1 days-before-pr-close: -1
any-of-issue-labels: "cannot-reproduce" any-of-issue-labels: 'cannot-reproduce'
operations-per-run: 200 operations-per-run: 200

View File

@ -1,47 +0,0 @@
# Validates repository content: JSON/YAML well-formedness, language files,
# and knex migration filenames. Runs on pushes/PRs to master (and 1.23.X PRs).
name: validate
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
      - 1.23.X
  workflow_dispatch:
permissions: {}
jobs:
  json-yaml-validate:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write # enable write permissions for pull request comments
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with: { persist-credentials: false }
      - name: json-yaml-validate
        id: json-yaml-validate
        uses: GrantBirki/json-yaml-validate@9bbaa8474e3af4e91f25eda8ac194fdc30564d96 # v4.0.0
        with:
          comment: "true" # enable comment mode
          exclude_file: ".github/config/exclude.txt" # gitignore style file for exclusions
  # General validations
  validate:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with: { persist-credentials: false }
      - name: Use Node.js 20
        uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
        with:
          node-version: 20
      - name: Validate language JSON files
        run: node ./extra/check-lang-json.js
      - name: Validate knex migrations filename
        run: node ./extra/check-knex-filenames.mjs

View File

@ -1,2 +0,0 @@
# language files
src/lang/*.json

View File

@ -1,65 +0,0 @@
/**
* Prettier Configuration for Uptime Kuma
*
* Usage:
* npm run fmt - Format all files (auto-runs in CI via autofix workflow)
* npm run fmt -- --check - Check formatting without making changes
*
* TIP: This formatter is automatically run in CI, so no need to worry about it
*/
module.exports = {
// Core formatting options - matching original ESLint rules
semi: true,
singleQuote: false,
trailingComma: "es5",
printWidth: 120,
tabWidth: 4,
useTabs: false,
endOfLine: "lf",
arrowParens: "always",
bracketSpacing: true,
bracketSameLine: false,
// Vue-specific settings
vueIndentScriptAndStyle: false,
singleAttributePerLine: false,
htmlWhitespaceSensitivity: "ignore", // More forgiving with whitespace in HTML
// Override settings for specific file types
overrides: [
{
files: "*.vue",
options: {
parser: "vue",
},
},
{
files: ["*.json"],
options: {
tabWidth: 4,
trailingComma: "none",
},
},
{
files: ["*.yml", "*.yaml"],
options: {
tabWidth: 2,
trailingComma: "none",
},
},
{
files: ["src/icon.js"],
options: {
trailingComma: "all",
},
},
{
files: ["*.md"],
options: {
printWidth: 100,
proseWrap: "preserve",
tabWidth: 2,
},
},
],
};

View File

@ -1,11 +1,10 @@
{ {
"extends": [ "extends": "stylelint-config-standard",
"stylelint-config-standard",
"stylelint-config-prettier"
],
"customSyntax": "postcss-html", "customSyntax": "postcss-html",
"rules": { "rules": {
"indentation": 4,
"no-descending-specificity": null, "no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null, "declaration-empty-line-before": null,
"alpha-value-notation": "number", "alpha-value-notation": "number",
"color-function-notation": "legacy", "color-function-notation": "legacy",

View File

@ -6,8 +6,8 @@ We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status, identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual nationality, personal appearance, race, religion, or sexual identity
identity and orientation. and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming, We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community. diverse, inclusive, and healthy community.
@ -17,23 +17,23 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our Examples of behavior that contributes to a positive environment for our
community include: community include:
- Demonstrating empathy and kindness toward other people * Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences * Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback * Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, * Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience and learning from the experience
- Focusing on what is best not just for us as individuals, but for the overall * Focusing on what is best not just for us as individuals, but for the
community overall community
Examples of unacceptable behavior include: Examples of unacceptable behavior include:
- The use of sexualized language or imagery, and sexual attention or advances of * The use of sexualized language or imagery, and sexual attention or
any kind advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks * Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment * Public or private harassment
- Publishing others' private information, such as a physical or email address, * Publishing others' private information, such as a physical or email
without their explicit permission address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a * Other conduct which could reasonably be considered inappropriate in a
professional setting professional setting
## Enforcement Responsibilities ## Enforcement Responsibilities
@ -52,7 +52,7 @@ decisions when appropriate.
This Code of Conduct applies within all community spaces, and also applies when This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces. an individual is officially representing the community in public spaces.
Examples of representing our community include using an official email address, Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed posting via an official social media account, or acting as an appointed
representative at an online or offline event. representative at an online or offline event.
@ -60,8 +60,8 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at reported to the community leaders responsible for enforcement at
<uptime@kuma.pet>. All complaints will be reviewed and investigated promptly and uptime@kuma.pet.
fairly. All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the All community leaders are obligated to respect the privacy and security of the
reporter of any incident. reporter of any incident.
@ -82,15 +82,15 @@ behavior was inappropriate. A public apology may be requested.
### 2. Warning ### 2. Warning
**Community Impact**: A violation through a single incident or series of **Community Impact**: A violation through a single incident or series
actions. of actions.
**Consequence**: A warning with consequences for continued behavior. No **Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent like social media. Violating these terms may lead to a temporary or
ban. permanent ban.
### 3. Temporary Ban ### 3. Temporary Ban
@ -109,24 +109,20 @@ Violating these terms may lead to a permanent ban.
standards, including sustained inappropriate behavior, harassment of an standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals. individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the **Consequence**: A permanent ban from any sort of public interaction within
community. the community.
## Attribution ## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at version 2.0, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder][Mozilla CoC]. enforcement ladder](https://github.com/mozilla/diversity).
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org [homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity For answers to common questions about this code of conduct, see the FAQ at
[FAQ]: https://www.contributor-covenant.org/faq https://www.contributor-covenant.org/faq. Translations are available at
[translations]: https://www.contributor-covenant.org/translations https://www.contributor-covenant.org/translations.

View File

@ -1,20 +1,16 @@
# Project Info # Project Info
First of all, I want to thank everyone who has submitted issues or shared pull First of all, I want to thank everyone who have wrote issues or shared pull requests for Uptime Kuma.
requests for Uptime Kuma. I never thought the GitHub community would be so nice! I never thought the GitHub community would be so nice!
Because of this, I also never thought that other people would actually read and Because of this, I also never thought that other people would actually read and edit my code.
edit my code. Parts of the code are not very well-structured or commented, sorry Parts of the code are not very well-structured or commented, sorry about that.
about that.
Before you start, please read our [Code of Conduct](CODE_OF_CONDUCT.md) to understand our community standards. The project was created with `vite.js` and is written in `vue3`.
Our backend lives in the `server`-directory and mostly communicates via websockets.
The project was created with `vite` and is written in `vue3`. Our backend
lives in the `server`-directory and mostly communicates via websockets.
Both frontend and backend share the same `package.json`. Both frontend and backend share the same `package.json`.
For production, the frontend is built into the `dist`-directory and the server For production, the frontend is build into `dist`-directory and the server (`express.js`) exposes the `dist` directory as the root of the endpoint.
(`express.js`) exposes the `dist` directory as the root of the endpoint. For For development, we run vite in development mode on another port.
development, we run vite in development mode on another port.
## Directories ## Directories
@ -29,275 +25,193 @@ development, we run vite in development mode on another port.
- `src` (Frontend source code) - `src` (Frontend source code)
- `test` (unit test) - `test` (unit test)
## Can I Create a Pull Request for Uptime Kuma? ## Can I create a pull request for Uptime Kuma?
Whether or not you can create a pull request depends on the nature of your Yes or no, it depends on what you will try to do.
contribution. We value both your time and our maintainers' time, so we want to Both your and our maintainers time is precious, and we don't want to waste both time.
make sure it's spent efficiently.
If you're unsure about any process or step, you're probably not the only one If you have any questions about any process/.. is not clear, you are likely not alone => please ask them ^^
with that question—please feel free to ask. We're happy to help!
Different types of pull requests (PRs) may have different guidelines, so be sure Different guidelines exist for different types of pull requests (PRs):
to review the appropriate one for your contribution. - <details><summary><b>security fixes</b></summary>
- <details><summary><b>Security Fixes</b> (click to expand)</summary>
<p> <p>
Submitting security fixes is something that may put the community at risk. Submitting security fixes is something that may put the community at risk.
Please read through our [security policy](SECURITY.md) and submit Please read through our [security policy](SECURITY.md) and submit vulnerabilities via an [advisory](https://github.com/louislam/uptime-kuma/security/advisories/new) + [issue](https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md) instead.
vulnerabilities via an [advisory] + [issue] instead. We encourage you to We encourage you to submit how to fix a vulnerability if you know how to, this is not required.
submit how to fix a vulnerability if you know how to, this is not required. Following the security policy allows us to properly test, fix bugs.
Following the security policy allows us to properly test, fix bugs. This This review allows us to notice, if there are any changes necessary to unrelated parts like the documentation.
review allows us to notice, if there are any changes necessary to unrelated
parts like the documentation.
[**PLEASE SEE OUR SECURITY POLICY.**](SECURITY.md) [**PLEASE SEE OUR SECURITY POLICY.**](SECURITY.md)
[advisory]: https://github.com/louislam/uptime-kuma/security/advisories/new
[issue]: https://github.com/louislam/uptime-kuma/issues/new?template=security_issue.yml
</p> </p>
</details> </details>
- <details><summary><b>small, non-breaking bug fixes</b></summary>
- <details><summary><b>Small, Non-Breaking Bug Fixes</b> (click to expand)</summary>
<p> <p>
If you come across a bug and think you can solve, we appreciate your work. If you come across a bug and think you can solve, we appreciate your work.
Please make sure that you follow these rules: Please make sure that you follow by these rules:
- keep the PR as small as possible, fix only one thing at a time => keeping it - keep the PR as small as possible, fix only one thing at a time => keeping it reviewable
reviewable - test that your code does what you came it does.
- test that your code does what you claim it does.
<sub>Because maintainer time is precious junior maintainers may merge uncontroversial PRs in this area.</sub>
<sub>Because maintainer time is precious, junior maintainers may merge
uncontroversial PRs in this area.</sub>
</p> </p>
</details> </details>
- <details><summary><b>translations / internationalisation (i18n)</b></summary>
- <details><summary><b>Translations / Internationalisation (i18n)</b> (click to expand)</summary>
<p> <p>
Please add **all** strings that are translatable to `src/lang/en.json`. If translation keys are omitted, they cannot be translated. **Do not include any other languages in your initial pull request** (even if it is your mother tongue) to avoid merge conflicts between Weblate and `master`. Once your PR is merged into `master`, the strings can be translated by awesome people donating their language skills. We use weblate to localise this project into many languages.
If you are unhappy with a translation this is the best start.
We use Weblate to localise this project into many languages. If you want to help translate Uptime Kuma into your language, please see [these instructions on how to translate using Weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md). On how to translate using weblate, please see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
There are some cases where a change cannot be done directly in Weblate and requires a PR: There are two cases in which a change cannot be done in weblate and requires a PR:
- A text may not yet be localisable. In this case, **adding a new language key** via `{{ $t("Translation key") }}` or [`<i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html) might be necessary. - A text may not be currently localisable. In this case, **adding a new language key** via `$t("languageKey")` might be nessesary
- Language keys need to be **added to `en.json`** to appear in Weblate. If this has not been done, a PR is appreciated. - language keys need to be **added to `en.json`** to be visible in weblate. If this has not happened, a PR is appreciated.
- **Adding a new language** requires creating a new file. See [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md). - **Adding a new language** requires a new file see [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md)
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub> <sub>Because maintainer time is precious junior maintainers may merge uncontroversial PRs in this area.</sub>
</p> </p>
</details> </details>
- <details><summary><b>new notification providers</b></summary>
- <details><summary><b>New Notification Providers</b> (click to expand)</summary>
<p> <p>
To set up a new notification provider these files need to be modified/created: To set up a new notification provider these files need to be modified/created:
- `server/notification-providers/PROVIDER_NAME.js` is where the heart of the - `server/notification-providers/PROVIDER_NAME.js` is where the heart of the notification provider lives.
notification provider lives. - Both `monitorJSON` and `heartbeatJSON` can be `null` for some events.
If both are `null`, this is a general testing message, but if just `heartbeatJSON` is `null` this is a certificate expiry.
- Both `monitorJSON` and `heartbeatJSON` can be `null` for some events. If - Please wrap the axios call into a
```js
both are `null`, this is a general testing message, but if just try {
`heartbeatJSON` is `null` this is a certificate expiry. let result = await axios.post(...);
if (result.status === ...) ...
- Please wrap the axios call into a } catch (error) {
this.throwGeneralAxiosError(error);
```js }
try { ```
let result = await axios.post(...); - `server/notification.js` is where the backend of the notification provider needs to be registered.
if (result.status === ...) ... *If you have an idea how we can skip this step, we would love to hear about it ^^*
} catch (error) { - `src/components/NotificationDialog.vue` you need to decide if the provider is a regional or a global one and add it with a name to the respective list
this.throwGeneralAxiosError(error); - `src/components/notifications/PROVIDER_NAME.vue` is where the frontend of each provider lives.
} Please make sure that you have:
``` - used `HiddenInput` for secret credentials
- included all the necessary helptexts/placeholder/.. to make sure the notification provider is simple to setup for new users.
- `server/notification.js` is where the backend of the notification provider - include all translations (`{{ $t("Translation key") }}`, [`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) in `src/lang/en.json` to enable our translators to translate this
needs to be registered. _If you have an idea how we can skip this step, we - `src/components/notifications/index.js` is where the frontend of the provider needs to be registered.
would love to hear about it ^^_ *If you have an idea how we can skip this step, we would love to hear about it ^^*
- `src/components/NotificationDialog.vue` you need to decide if the provider
is a regional or a global one and add it with a name to the respective list
- `src/components/notifications/PROVIDER_NAME.vue` is where the frontend of
each provider lives. Please make sure that you have:
- used `HiddenInput` for secret credentials
- included all the necessary helptexts/placeholder/.. to make sure the
notification provider is simple to setup for new users. - include all
translations (`{{ $t("Translation key") }}`,
[`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html))
in `src/lang/en.json` to enable our translators to translate this
- `src/components/notifications/index.js` is where the frontend of the
provider needs to be registered. _If you have an idea how we can skip this
step, we would love to hear about it ^^_
Offering notifications is close to the core of what we are as an uptime
monitor. Therefore, making sure that they work is also really important.
Because testing notification providers is quite time intensive, we mostly
offload this onto the person contributing a notification provider.
To make sure you have tested the notification provider, please include
screenshots of the following events in the pull-request description:
- `UP`/`DOWN`
- Certificate Expiry via <https://expired.badssl.com/>
- Domain Expiry via <https://google.com/> and a larger time set
- Testing (the test button on the notification provider setup page)
<br/>
Offering notifications is close to the core of what we are as an uptime monitor.
Therefore, making sure that they work is also really important.
Because testing notification providers is quite time intensive, we mostly offload this onto the person contributing a notification provider.
To make shure you have tested the notification provider, please include screenshots of the following events in the pull-request description:
- `UP`/`DOWN`
- Certificate Expiry via https://expired.badssl.com/
- Testing (the test button on the notification provider setup page)
Using the following way to format this is encouraged: Using the following way to format this is encouraged:
```md ```md
| Event | Before | After | | Event | Before | After |
| ------------------ | --------------------- | -------------------- | ------------------
| `UP` | ![Before](image-link) | ![After](image-link) | | `UP` | paste-image-here | paste-image-here |
| `DOWN` | ![Before](image-link) | ![After](image-link) | | `DOWN` | paste-image-here | paste-image-here |
| Certificate-expiry | ![Before](image-link) | ![After](image-link) | | Certificate-expiry | paste-image-here | paste-image-here |
| Domain-expiry | ![Before](image-link) | ![After](image-link) | | Testing | paste-image-here | paste-image-here |
| Testing | ![Before](image-link) | ![After](image-link) |
``` ```
<sub>Because maintainer time is precious, junior maintainers may merge <sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
uncontroversial PRs in this area.</sub>
</p> </p>
</details> </details>
- <details><summary><b>new monitoring types</b></summary>
- <details><summary><b>New Monitoring Types</b> (click to expand)</summary>
<p> <p>
To set up a new notification provider these files need to be modified/created: To set up a new notification provider these files need to be modified/created:
- `server/monitor-types/MONITORING_TYPE.js` is the core of each monitor. - `server/monitor-types/MONITORING_TYPE.js` is the core of each monitor.
The `async check(...)`-function should: the `async check(...)`-function should:
- in the happy-path: set `heartbeat.msg` to a successful message and set `heartbeat.status = UP` - throw an error for each fault that is detected with an actionable error message
- in the unhappy-path: throw an `Error` for each fault that is detected with an actionable error message. - in the happy-path, you should set `heartbeat.msg` to a successful message and set `heartbeat.status = UP`
- NEVER set `heartbeat.status = DOWN` unless you want to explicitly ignore retries. - `server/uptime-kuma-server.js` is where the monitoring backend needs to be registered.
*If you have an idea how we can skip this step, we would love to hear about it ^^*
- `server/uptime-kuma-server.js` is where the monitoring backend needs to be
registered. _If you have an idea how we can skip this step, we would love to
hear about it ^^_
- `src/pages/EditMonitor.vue` is the shared frontend users interact with. - `src/pages/EditMonitor.vue` is the shared frontend users interact with.
Please make sure that you have: - used `HiddenInput` for secret Please make sure that you have:
credentials - included all the necessary helptexts/placeholder/.. to make - used `HiddenInput` for secret credentials
sure the notification provider is simple to setup for new users. - include - included all the necessary helptexts/placeholder/.. to make sure the notification provider is simple to setup for new users.
all translations (`{{ $t("Translation key") }}`, - include all translations (`{{ $t("Translation key") }}`, [`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) in `src/lang/en.json` to enable our translators to translate this
[`<i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html)) -
in `src/lang/en.json` to enable our translators to translate this
<sub>Because maintainer time is precious, junior maintainers may merge
uncontroversial PRs in this area.</sub>
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
</p>
</details>
- <details><summary><b>new features/ major changes / breaking bugfixes</b></summary>
<p>
be sure to **create an empty draft pull request or open an issue, so we can have a discussion first**.
This is especially important for a large pull request or you don't know if it will be merged or not.
<sub>Because of the large impact of this work, only senior maintainers may merge PRs in this area.</sub>
</p> </p>
</details> </details>
- <details><summary><b>New Features / Major Changes / Breaking Bugfixes</b> (click to expand)</summary> The following rules are essential for making your PR mergeable:
<p> - Merging multiple issues by a huge PR is more difficult to review and causes conflicts with other PRs. Please
be sure to **create an empty draft pull request or open an issue, so we can
have a discussion first**.
This is especially important for large pull requests or when you don't know if it will be merged or not.
When adding new features, please also add tests to ensure your changes work as expected and to prevent future regressions.
<sub>Because of the large impact of this work, only senior maintainers may
merge PRs in this area. </sub>
</p>
</details>
- <details><summary><b>As a First-Time Contributor</b> (click to expand)</summary>
<p>
Contributing is easy and fun. We will guide you through the process:
1. **Fork** the [Uptime-Kuma repository](https://github.com/louislam/uptime-kuma/) and **clone** it to your local machine.
2. **Create a new branch** for your changes (e.g., `signal-notification-provider`).
3. **Make your changes** and **commit** them with a clear message.
4. **Push** your changes to your forked repository.
5. **Open a pull request** to the `master` branch of the Uptime Kuma repository.
- For large changes, please open a **draft pull request** first to discuss the changes with the maintainers.
6. **Provide a clear and concise description** of the changes you've made and link any related issues.
7. **Complete the PR checklist** and make sure all CI checks pass.
8. **Request a review** when your pull request is ready.
## When Can You Change the PR Status to "Ready for Review"?
A PR should remain in **draft status** until all tasks are completed.
Only change the status to **Ready for Review** when:
- You have implemented all planned changes.
- Your code is fully tested and ready for review.
- You have updated or created the necessary tests.
- You have verified that CI/CD checks pass successfully.
A volunteer maintainer will review your PR as soon as possible.
You can help us by reviewing other PRs or taking a look at open issues.
## The following rules are essential for making your PR mergeable
- Merging multiple issues by a huge PR is more difficult to review and causes
conflicts with other PRs. Please
- (if possible) **create one PR for one issue** or - (if possible) **create one PR for one issue** or
- (if not possible) **explain which issues a PR addresses and why this PR - (if not possible) **explain which issues a PR addresses and why this PR should not be broken apart**
should not be broken apart** - Make sure your **PR passes our continuous integration**.
PRs will not be merged unless all CI-Checks are green.
- Make sure your **PR passes our continuous integration**. PRs will not be - **Breaking changes** (unless for a good reason and discussed beforehand) will not get merged / not get merged quickly.
merged unless all CI-Checks are green. Such changes require a major version release.
- **Breaking changes** (unless for a good reason and discussed beforehand) will - **Test your code** before submitting a PR.
not get merged / not get merged quickly. Such changes require a major version Buggy PRs will not be merged.
release.
- **Test your code** before submitting a PR. Buggy PRs will not be merged.
- Make sure the **UI/UX is close to Uptime Kuma**. - Make sure the **UI/UX is close to Uptime Kuma**.
- **Think about the maintainability**: Don't add functionality that is - **Think about the maintainability**:
completely **out of scope**. Keep in mind that we need to be able to maintain Don't add functionality that is completely **out of scope**.
the functionality. Keep in mind that we need to be able to maintain the functionality.
- Don't modify or delete existing logic without a valid reason. - Don't modify or delete existing logic without a valid reason.
- Don't convert existing code into other programming languages for no reason. - Don't convert existing code into other programming languages for no reason.
### Continuous Integration
All pull requests must pass our continuous integration checks. These checks include:
- **Linting**: We use ESLint and Stylelint for code quality checks. You can run the linter locally with `npm run lint`.
- **Formatting**: We use Prettier for code formatting. You can format your code with `npm run fmt` (or CI will do this for you)
- **Testing**: We use Playwright for end-to-end tests and have a suite of backend tests. You can run the tests locally with `npm test`.
I ([@louislam](https://github.com/louislam)) have the final say. I ([@louislam](https://github.com/louislam)) have the final say.
If your pull request does not meet my expectations, I will reject it, no matter how much time If your pull request does not meet my expectations, I will reject it, no matter how much time you spent on it.
you spent on it. Therefore, it is essential to have a discussion beforehand.
We will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if we plan to review and merge it. I will assign your pull request to a [milestone](https://github.com/louislam/uptime-kuma/milestones), if I plan to review and merge it.
Please don't rush or ask for an ETA. Please don't rush or ask for an ETA.
We have to understand the pull request, make sure it has no breaking changes and stick to the vision of this project, especially for large pull requests. We have to understand the pull request, make sure it has no breaking changes and stick to the vision of this project, especially for large pull requests.
## I'd Like to Work on an Issue. How Do I Do That?
We have found that assigning people to issues is unnecessary management ## I'd like to work on an issue. How do I do that?
overhead. Instead, a short comment stating that you want to work on an issue is
appreciated, as it saves time for other developers. If you encounter any
problems during development, feel free to leave a comment describing what you
are stuck on. We are here to help.
## Project Style We have found that assigning people to issues is management-overhead that we don't need.
A short comment that you want to try your hand at this issue is appreciated to save other devs time.
If you come across any problem during development, feel free to leave a comment with what you are stuck on.
I personally do not like something that requires a lot of configuration before ### Recommended Pull Request Guideline
you can finally start the app. The goal is to make the Uptime Kuma installation
as easy as installing a mobile app. Before diving deep into coding, having a discussion first by creating an empty pull request for discussion is preferred.
The rationale behind this is that we can align the direction and scope of the feature to eliminate any conflicts with existing and planned work, and can help by pointing out any potential pitfalls.
1. Fork the project
2. Clone your fork repo to local
3. Create a new branch
4. Create an empty commit: `git commit -m "<YOUR TASK NAME>" --allow-empty`
5. Push to your fork repo
6. Prepare a pull request: https://github.com/louislam/uptime-kuma/compare
7. Write a proper description. You can mention @louislam in it, so @louislam will get the notification.
8. Create your pull request as a Draft
9. Wait for the discussion
## Project Styles
I personally do not like something that requires so many configurations before you can finally start the app.
The goal is to make the Uptime Kuma installation as easy as installing a mobile app.
- Easy to install for non-Docker users - Easy to install for non-Docker users
- no native build dependency is needed (for `x86_64`/`armv7`/`arm64`) - no native build dependency is needed (for `x86_64`/`armv7`/`arm64`)
- no extra configuration and - no extra configuration and
- no extra effort required to get it running - no extra effort required to get it running
- Single container for Docker users - Single container for Docker users
- no complex docker-compose file - no complex docker-compose file
- mapping the volume and exposing the port should be the only requirements - mapping the volume and exposing the port should be the only requirements
- Settings should be configurable in the frontend. Environment variables are discouraged, unless it is related to startup such as `DATA_DIR`
- Settings should be configurable in the frontend. Environment variables are
discouraged, unless it is related to startup such as `DATA_DIR`
- Easy to use - Easy to use
- The web UI styling should be consistent and nice - The web UI styling should be consistent and nice
@ -316,21 +230,16 @@ as easy as installing a mobile app.
## Tools ## Tools
- [`Node.js`](https://nodejs.org/) >= 20.4.0 - [`Node.js`](https://nodejs.org/) >= 18
- [`npm`](https://www.npmjs.com/) >= 9.3 - [`npm`](https://www.npmjs.com/) >= 9.3
- [`git`](https://git-scm.com/) - [`git`](https://git-scm.com/)
- IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using - IDE that supports [`ESLint`](https://eslint.org/) and EditorConfig (I am using [`IntelliJ IDEA`](https://www.jetbrains.com/idea/))
[`IntelliJ IDEA`](https://www.jetbrains.com/idea/)) - A SQLite GUI tool (f.ex. [`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or [`DBeaver Community`](https://dbeaver.io/download/))
- A SQLite GUI tool (f.ex.
[`SQLite Expert Personal`](https://www.sqliteexpert.com/download.html) or
[`DBeaver Community`](https://dbeaver.io/download/))
## Git Branches ## Git Branches
- `master`: 2.X.X development. If you want to add a new feature, your pull - `master`: 2.X.X development. If you want to add a new feature, your pull request should base on this.
request should base on this. - `1.23.X`: 1.23.X development. If you want to fix a bug for v1 and v2, your pull request should base on this.
- `1.23.X`: 1.23.X development. If you want to fix a bug for v1 and v2, your
pull request should base on this.
- All other branches are unused, outdated or for dev. - All other branches are unused, outdated or for dev.
## Install Dependencies for Development ## Install Dependencies for Development
@ -341,6 +250,8 @@ npm ci
## Dev Server ## Dev Server
(2022-04-26 Update)
We can start the frontend dev server and the backend dev server in one command. We can start the frontend dev server and the backend dev server in one command.
Port `3000` and port `3001` will be used. Port `3000` and port `3001` will be used.
@ -349,8 +260,7 @@ Port `3000` and port `3001` will be used.
npm run dev npm run dev
``` ```
But sometimes you may want to restart the server without restarting the But sometimes, you would like to restart the server, but not the frontend, you can run these commands in two terminals:
frontend. In that case, you can run these commands in two terminals:
```bash ```bash
npm run start-frontend-dev npm run start-frontend-dev
@ -361,9 +271,9 @@ npm run start-server-dev
It binds to `0.0.0.0:3001` by default. It binds to `0.0.0.0:3001` by default.
The backend is an `express.js` server with `socket.io` integrated. It uses The backend is an `express.js` server with `socket.io` integrated.
`socket.io` to communicate with clients, and most server logic is encapsulated It uses `socket.io` to communicate with clients, and most server logic is encapsulated in the `socket.io` handlers.
in the `socket.io` handlers. `express.js` is also used to serve: `express.js` is also used to serve:
- as an entry point for redirecting to a status page or the dashboard - as an entry point for redirecting to a status page or the dashboard
- the frontend built files (`index.html`, `*.js`, `*.css`, etc.) - the frontend built files (`index.html`, `*.js`, `*.css`, etc.)
@ -379,37 +289,38 @@ in the `socket.io` handlers. `express.js` is also used to serve:
- `routers/` (Express Routers) - `routers/` (Express Routers)
- `socket-handler/` (Socket.io Handlers) - `socket-handler/` (Socket.io Handlers)
- `server.js` (Server entry point) - `server.js` (Server entry point)
- `uptime-kuma-server.js` (UptimeKumaServer class, main logic should be here, - `uptime-kuma-server.js` (UptimeKumaServer class, main logic should be here, but some still in `server.js`)
but some still in `server.js`)
## Frontend Dev Server ## Frontend Dev Server
It binds to `0.0.0.0:3000` by default. The frontend dev server is used for It binds to `0.0.0.0:3000` by default. The frontend dev server is used for development only.
development only.
For production, it is not used. It will be compiled to `dist` directory instead via `npm run build`. For production, it is not used. It will be compiled to `dist` directory instead.
You can use Vue.js devtools Chrome extension for debugging. You can use Vue.js devtools Chrome extension for debugging.
### Build the frontend
```bash
npm run build
```
### Frontend Details ### Frontend Details
Uptime Kuma Frontend is a single page application (SPA). Most paths are handled Uptime Kuma Frontend is a single page application (SPA). Most paths are handled by Vue Router.
by Vue Router.
The router is in `src/router.js` The router is in `src/router.js`
Most data in the frontend is stored at the root level, even though the router can navigate to different pages. As you can see, most data in the frontend is stored at the root level, even though you changed the current router to any other pages.
The data and socket logic are in `src/mixins/socket.js`. The data and socket logic are in `src/mixins/socket.js`.
## Database Migration ## Database Migration
See: <https://github.com/louislam/uptime-kuma/tree/master/db/knex_migrations> See: https://github.com/louislam/uptime-kuma/tree/master/db/knex_migrations
## Unit Test ## Unit Test
To run unit tests, use the following command:
```bash ```bash
npm run build npm run build
npm test npm test
@ -418,11 +329,10 @@ npm test
## Dependencies ## Dependencies
Both frontend and backend share the same `package.json`. Both frontend and backend share the same `package.json`.
However, the frontend dependencies are eventually not used in the production environment, because it However, the frontend dependencies are eventually not used in the production environment, because it is usually also baked into `dist` files. So:
is usually also baked into `dist` files. So:
- Frontend dependencies = "devDependencies" - Frontend dependencies = "devDependencies"
- Examples: - `vue`, `chart.js` - Examples: `vue`, `chart.js`
- Backend dependencies = "dependencies" - Backend dependencies = "dependencies"
- Examples: `socket.io`, `sqlite3` - Examples: `socket.io`, `sqlite3`
- Development dependencies = "devDependencies" - Development dependencies = "devDependencies"
@ -430,35 +340,80 @@ is usually also baked into `dist` files. So:
### Update Dependencies ### Update Dependencies
Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, Since previously updating Vite 2.5.10 to 2.6.0 broke the application completely, from now on, it should update the patch release version only.
from now on, it should update the patch release version only.
Patch release = the third digit ([Semantic Versioning](https://semver.org/)) Patch release = the third digit ([Semantic Versioning](https://semver.org/))
If for security / bug / other reasons, a library must be updated, breaking If for security / bug / other reasons, a library must be updated, breaking changes need to be checked by the person proposing the change.
changes need to be checked by the person proposing the change.
## Translations
Please add **all** the strings which are translatable to `src/lang/en.json` (if translation keys are omitted, they can not be translated.)
**Don't include any other languages in your initial pull request** (even if this is your mother tongue), to avoid merge-conflicts between weblate and `master`.
The translations can then (after merging a PR into `master`) be translated by awesome people donating their language skills.
If you want to help by translating Uptime Kuma into your language, please visit the [instructions on how to translate using weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
## Spelling & Grammar ## Spelling & Grammar
Feel free to correct the spelling and grammar in the documentation or code. Feel free to correct the grammar in the documentation or code.
English is not the native language of the maintainers. My mother language is not English and my grammar is not that great.
## Wiki ## Wiki
Since there is no way to make a pull request to the wiki, I have set up another Since there is no way to make a pull request to the wiki, I have set up another repo to do that.
repo to do that.
<https://github.com/louislam/uptime-kuma-wiki> https://github.com/louislam/uptime-kuma-wiki
## Docker
### Arch
- amd64
- arm64
- armv7
### Docker Tags
#### v2
- `2`, `latest-2`: v2 with full features such as Chromium and bundled MariaDB
- `2.x.x`
- `2-slim`: v2 with basic features
- `2.x.x-slim`
- `beta2`: Latest beta build
- `2.x.x-beta.x`
- `nightly2`: Dev build
- `base2`: Basic Debian setup without Uptime Kuma source code (Full features)
- `base2-slim`: Basic Debian setup without Uptime Kuma source code
- `pr-test2`: For testing pull request without setting up a local environment
#### v1
- `1`, `latest`, `1-debian`, `debian`: Latest version of v1
- `1.x.x`, `1.x.x-debian`
- `1.x.x-beta.x`: Beta build
- `beta`: Latest beta build
- `nightly`: Dev build
- `base-debian`: Basic Debian setup without Uptime Kuma source code
- `pr-test`: For testing pull request without setting up a local environment
- `base-alpine`: (Deprecated) Basic Alpine setup without Uptime Kuma source code
- `1-alpine`, `alpine`: (Deprecated)
- `1.x.x-alpine`: (Deprecated)
## Maintainer ## Maintainer
Check the latest issues and pull requests:
https://github.com/louislam/uptime-kuma/issues?q=sort%3Aupdated-desc
### What is a maintainer and what are their roles? ### What is a maintainer and what are their roles?
This project has multiple maintainers who specialise in different areas. This project has multiple maintainers who specialise in different areas.
Currently, there are 3 maintainers: Currently, there are 3 maintainers:
| Person | Role | Main Area | | Person | Role | Main Area |
| ----------------- | ----------------- | ---------------- | |-------------------|-------------------|------------------|
| `@louislam` | senior maintainer | major features | | `@louislam` | senior maintainer | major features |
| `@chakflying` | junior maintainer | fixing bugs | | `@chakflying` | junior maintainer | fixing bugs |
| `@commanderstorm` | junior maintainer | issue-management | | `@commanderstorm` | junior maintainer | issue-management |
@ -466,46 +421,39 @@ Currently, there are 3 maintainers:
### Procedures ### Procedures
We have a few procedures we follow. These are documented here: We have a few procedures we follow. These are documented here:
- <details><summary>Set up a Docker Builder</summary>
- <details><summary><b>Set up a Docker Builder</b> (click to expand)</summary>
<p> <p>
- amd64, armv7 using local. - amd64, armv7 using local.
- arm64 using remote arm64 cpu, as the emulator is too slow and can no longer - arm64 using remote arm64 cpu, as the emulator is too slow and can no longer pass the `npm ci` command.
pass the `npm ci` command. 1. Add the public key to the remote server.
1. Add the public key to the remote server. 2. Add the remote context. The remote machine must be arm64 and installed Docker CE.
2. Add the remote context. The remote machine must be arm64 and installed ```
Docker CE. docker context create oracle-arm64-jp --docker "host=ssh://root@100.107.174.88"
```
```bash 3. Create a new builder.
docker context create oracle-arm64-jp --docker "host=ssh://root@100.107.174.88" ```
``` docker buildx create --name kuma-builder --platform linux/amd64,linux/arm/v7
docker buildx use kuma-builder
3. Create a new builder. docker buildx inspect --bootstrap
```
```bash 4. Append the remote context to the builder.
docker buildx create --name kuma-builder --platform linux/amd64,linux/arm/v7 ```
docker buildx use kuma-builder docker buildx create --append --name kuma-builder --platform linux/arm64 oracle-arm64-jp
docker buildx inspect --bootstrap ```
``` 5. Verify the builder and check if the builder is using `kuma-builder`.
```
4. Append the remote context to the builder. docker buildx inspect kuma-builder
docker buildx ls
```bash ```
docker buildx create --append --name kuma-builder --platform linux/arm64 oracle-arm64-jp
```
5. Verify the builder and check if the builder is using `kuma-builder`.
`docker buildx inspect kuma-builder docker buildx ls`
</p> </p>
</details> </details>
- <details><summary>Release</summary>
- <details><summary><b>Release</b> (click to expand)</summary>
<p> <p>
1. Draft a release note 1. Draft a release note
2. Make sure the repo is cleared 2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it: 3. If the healthcheck is updated, remember to re-compile it: `npm run build-docker-builder-go`
`npm run build-docker-builder-go`
4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN` 4. `npm run release-final` with env vars: `VERSION` and `GITHUB_TOKEN`
5. Wait until the `Press any key to continue` 5. Wait until the `Press any key to continue`
6. `git push` 6. `git push`
@ -514,54 +462,51 @@ We have a few procedures we follow. These are documented here:
9. Deploy to the demo server: `npm run deploy-demo-server` 9. Deploy to the demo server: `npm run deploy-demo-server`
These Items need to be checked: These Items need to be checked:
- [ ] Check all tags is fine on
<https://hub.docker.com/r/louislam/uptime-kuma/tags>
- [ ] Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 /
armv7)
- [ ] Try clean installation with Node.js
- [ ] Check all tags is fine on https://hub.docker.com/r/louislam/uptime-kuma/tags
- [ ] Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 / armv7)
- [ ] Try clean installation with Node.js
</p> </p>
</details> </details>
- <details><summary>Release Beta</summary>
- <details><summary><b>Release Beta</b> (click to expand)</summary>
<p> <p>
1. Draft a release note, check `This is a pre-release` 1. Draft a release note, check `This is a pre-release`
2. Make sure the repo is cleared 2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN` 3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`
4. Wait until the `Press any key to continue` 4. Wait until the `Press any key to continue`
5. Publish the release note as `1.X.X-beta.X` 5. Publish the release note as `1.X.X-beta.X`
6. Press any key to continue 6. Press any key to continue
</p> </p>
</details> </details>
- <details><summary>Release Wiki</summary>
- <details><summary><b>Release Wiki</b> (click to expand)</summary>
<p> <p>
**Setup Repo** **Setup Repo**
```bash ```bash
git clone https://github.com/louislam/uptime-kuma-wiki.git git clone https://github.com/louislam/uptime-kuma-wiki.git
cd uptime-kuma-wiki cd uptime-kuma-wiki
git remote add production https://github.com/louislam/uptime-kuma.wiki.git git remote add production https://github.com/louislam/uptime-kuma.wiki.git
``` ```
**Push to Production Wiki** **Push to Production Wiki**
```bash ```bash
git pull git pull
git push production master git push production master
``` ```
</p> </p>
</details> </details>
- <details><summary>Change the base of a pull request such as <code>master</code> to <code>1.23.X</code></summary>
- <details><summary>Change the base of a pull request such as <code>master</code> to <code>1.23.X</code> (click to expand)</summary>
<p> <p>
```bash ```bash
git rebase --onto <new parent> <old parent> git rebase --onto <new parent> <old parent>
``` ```
</p> </p>
</details> </details>

View File

@ -1,29 +1,29 @@
<div align="center" width="100%"> <div align="center" width="100%">
<img src="./public/icon.svg" width="128" alt="Uptime Kuma Logo" /> <img src="./public/icon.svg" width="128" alt="" />
</div> </div>
# Uptime Kuma # Uptime Kuma
Uptime Kuma is an easy-to-use self-hosted monitoring tool. Uptime Kuma is an easy-to-use self-hosted monitoring tool.
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/2?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/"> [![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" /> <img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a> </a>
<img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="Uptime Kuma Dashboard Screenshot" /> <img src="https://user-images.githubusercontent.com/1336778/212262296-e6205815-ad62-488c-83ec-a5b0d0689f7c.jpg" width="700" alt="" />
## 🥔 Live Demo ## 🥔 Live Demo
Try it! Try it!
Demo Server (Location: Frankfurt - Germany): <https://demo.kuma.pet/start-demo> Demo Server (Location: Frankfurt - Germany): https://demo.kuma.pet/start-demo
It is a temporary live demo, all data will be deleted after 10 minutes. Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors). It is a temporary live demo, all data will be deleted after 10 minutes. Sponsored by [Uptime Kuma Sponsors](https://github.com/louislam/uptime-kuma#%EF%B8%8F-sponsors).
## ⭐ Features ## ⭐ Features
- Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Websocket / Ping / DNS Record / Push / Steam Game Server / Docker Containers - Monitoring uptime for HTTP(s) / TCP / HTTP(s) Keyword / HTTP(s) Json Query / Ping / DNS Record / Push / Steam Game Server / Docker Containers
- Fancy, Reactive, Fast UI/UX - Fancy, Reactive, Fast UI/UX
- Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications) - Notifications via Telegram, Discord, Gotify, Slack, Pushover, Email (SMTP), and [90+ notification services, click here for the full list](https://github.com/louislam/uptime-kuma/tree/master/src/components/notifications)
- 20-second intervals - 20-second intervals
@ -37,44 +37,34 @@ It is a temporary live demo, all data will be deleted after 10 minutes. Sponsore
## 🔧 How to Install ## 🔧 How to Install
### 🐳 Docker Compose ### 🐳 Docker
```bash ```bash
mkdir uptime-kuma docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
cd uptime-kuma
curl -o compose.yaml https://raw.githubusercontent.com/louislam/uptime-kuma/master/compose.yaml
docker compose up -d
``` ```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001). Uptime Kuma is now running on <http://0.0.0.0:3001>.
> [!WARNING] > [!WARNING]
> File Systems like **NFS** (Network File System) are **NOT** supported. Please map to a local directory or volume. > File Systems like **NFS** (Network File System) are **NOT** supported. Please map to a local directory or volume.
### 🐳 Docker Command > [!NOTE]
> If you want to limit exposure to localhost (without exposing port for other users or to use a [reverse proxy](https://github.com/louislam/uptime-kuma/wiki/Reverse-Proxy)), you can expose the port like this:
```bash >
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:2 > ```bash
``` > docker run -d --restart=always -p 127.0.0.1:3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:1
> ```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
If you want to limit exposure to localhost only:
```bash
docker run ... -p 127.0.0.1:3001:3001 ...
```
### 💪🏻 Non-Docker ### 💪🏻 Non-Docker
Requirements: Requirements:
- Platform - Platform
- ✅ Major Linux distros such as Debian, Ubuntu, Fedora and ArchLinux etc. - ✅ Major Linux distros such as Debian, Ubuntu, CentOS, Fedora and ArchLinux etc.
- ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher - ✅ Windows 10 (x64), Windows Server 2012 R2 (x64) or higher
- ❌ FreeBSD / OpenBSD / NetBSD
- ❌ Replit / Heroku - ❌ Replit / Heroku
- [Node.js](https://nodejs.org/en/download/) >= 20.4 - [Node.js](https://nodejs.org/en/download/) 18 / 20.4
- [npm](https://docs.npmjs.com/cli/) 9
- [Git](https://git-scm.com/downloads) - [Git](https://git-scm.com/downloads)
- [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background - [pm2](https://pm2.keymetrics.io/) - For running Uptime Kuma in the background
@ -94,7 +84,7 @@ npm install pm2 -g && pm2 install pm2-logrotate
pm2 start server/server.js --name uptime-kuma pm2 start server/server.js --name uptime-kuma
``` ```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001). Uptime Kuma is now running on http://localhost:3001
More useful PM2 Commands More useful PM2 Commands
@ -103,50 +93,50 @@ More useful PM2 Commands
pm2 monit pm2 monit
# If you want to add it to startup # If you want to add it to startup
pm2 startup && pm2 save pm2 save && pm2 startup
``` ```
### Advanced Installation ### Advanced Installation
If you need more options or need to browse via a reverse proxy, please read: If you need more options or need to browse via a reverse proxy, please read:
<https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install> https://github.com/louislam/uptime-kuma/wiki/%F0%9F%94%A7-How-to-Install
## 🆙 How to Update ## 🆙 How to Update
Please read: Please read:
<https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update> https://github.com/louislam/uptime-kuma/wiki/%F0%9F%86%99-How-to-Update
## 🆕 What's Next? ## 🆕 What's Next?
I will assign requests/issues to the next milestone. I will assign requests/issues to the next milestone.
<https://github.com/louislam/uptime-kuma/milestones> https://github.com/louislam/uptime-kuma/milestones
## ❤️ Sponsors ## ❤️ Sponsors
Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated) Thank you so much! (GitHub Sponsors will be updated manually. OpenCollective sponsors will be updated automatically, the list will be cached by GitHub though. It may need some time to be updated)
<img src="https://uptime.kuma.pet/sponsors?v=6" alt="Uptime Kuma Sponsors" /> <img src="https://uptime.kuma.pet/sponsors?v=6" alt />
## 🖼 More Screenshots ## 🖼 More Screenshots
Light Mode: Light Mode:
<img src="https://uptime.kuma.pet/img/light.jpg" width="512" alt="Uptime Kuma Light Mode Screenshot of how the Dashboard looks" /> <img src="https://uptime.kuma.pet/img/light.jpg" width="512" alt="" />
Status Page: Status Page:
<img src="https://user-images.githubusercontent.com/1336778/134628766-a3fe0981-0926-4285-ab46-891a21c3e4cb.png" width="512" alt="Uptime Kuma Status Page Screenshot" /> <img src="https://user-images.githubusercontent.com/1336778/134628766-a3fe0981-0926-4285-ab46-891a21c3e4cb.png" width="512" alt="" />
Settings Page: Settings Page:
<img src="https://louislam.net/uptimekuma/2.jpg" width="400" alt="Uptime Kuma Settings Page Screenshot" /> <img src="https://louislam.net/uptimekuma/2.jpg" width="400" alt="" />
Telegram Notification Sample: Telegram Notification Sample:
<img src="https://louislam.net/uptimekuma/3.jpg" width="400" alt="Uptime Kuma Telegram Notification Sample Screenshot" /> <img src="https://louislam.net/uptimekuma/3.jpg" width="400" alt="" />
## Motivation ## Motivation
@ -175,19 +165,19 @@ You can mention me if you ask a question on the subreddit.
### Create Pull Requests ### Create Pull Requests
Pull requests are awesome. We DO NOT accept all types of pull requests and do not want to waste your time. Please be sure that you have read and follow pull request rules:
To keep reviews fast and effective, please make sure youve [read our pull request guidelines](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma). [CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma)
### Test Pull Requests ### Test Pull Requests
There are a lot of pull requests right now, but I don't have time to test them all. There are a lot of pull requests right now, but I don't have time to test them all.
If you want to help, you can check this: If you want to help, you can check this:
<https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests> https://github.com/louislam/uptime-kuma/wiki/Test-Pull-Requests
### Test Beta Version ### Test Beta Version
Check out the latest beta release here: <https://github.com/louislam/uptime-kuma/releases> Check out the latest beta release here: https://github.com/louislam/uptime-kuma/releases
### Bug Reports / Feature Requests ### Bug Reports / Feature Requests
@ -201,3 +191,5 @@ If you want to translate Uptime Kuma into your language, please visit [Weblate R
Feel free to correct the grammar in the documentation or code. Feel free to correct the grammar in the documentation or code.
My mother language is not English and my grammar is not that great. My mother language is not English and my grammar is not that great.

View File

@ -2,42 +2,29 @@
## Reporting a Vulnerability ## Reporting a Vulnerability
1. Please report security issues to 1. Please report security issues to https://github.com/louislam/uptime-kuma/security/advisories/new.
<https://github.com/louislam/uptime-kuma/security/advisories/new>. 2. Please also create an empty security issue to alert me, as GitHub Advisories do not send a notification, I probably will miss it without this. https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md
2. Please also create an empty security issue to alert me, as GitHub Advisories
do not send a notification, I probably will miss it without this.
<https://github.com/louislam/uptime-kuma/issues/new?assignees=&labels=help&template=security.md>
- Do not report any upstream dependency issues / scan result by any tools. It will be closed immediately without explanations. Unless you have PoC to prove that the upstream issue affected Uptime Kuma. Do not use the public issue tracker or discuss it in public as it will cause more damage.
- Do not use the public issue tracker or discuss it in public as it will cause
more damage.
## Do you accept other 3rd-party bug bounty platforms? ## Do you accept other 3rd-party bug bounty platforms?
At this moment, I DO NOT accept other bug bounty platforms, because I am not At this moment, I DO NOT accept other bug bounty platforms, because I am not familiar with these platforms and someone has tried to send a phishing link to me by doing this already. To minimize my own risk, please report through GitHub Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
familiar with these platforms and someone has tried to send a phishing link to
me by doing this already. To minimize my own risk, please report through GitHub
Advisories only. I will ignore all 3rd-party bug bounty platforms emails.
## Supported Versions ## Supported Versions
### Uptime Kuma Versions ### Uptime Kuma Versions
You should use or upgrade to the latest version of Uptime Kuma. You should use or upgrade to the latest version of Uptime Kuma. All `1.X.X` versions are upgradable to the latest version.
All versions are upgradable to the latest version.
### Upgradable Docker Tags ### Upgradable Docker Tags
| Tag | Supported | | Tag | Supported |
| --------------- | ------------------------------------------------------------------------------------- | |-|-|
| 2 | :white_check_mark: | | 1 | :white_check_mark: |
| 2-slim | :white_check_mark: | | 1-debian | :white_check_mark: |
| next | :white_check_mark: | | latest | :white_check_mark: |
| next-slim | :white_check_mark: | | debian | :white_check_mark: |
| 2-rootless | :white_check_mark: | | 1-alpine | ⚠️ Deprecated |
| 2-slim-rootless | :white_check_mark: | | alpine | ⚠️ Deprecated |
| 1 | [⚠️ Deprecated](https://github.com/louislam/uptime-kuma/wiki/Migration-From-v1-To-v2) | | All other tags | ❌ |
| 1-debian | [⚠️ Deprecated](https://github.com/louislam/uptime-kuma/wiki/Migration-From-v1-To-v2) |
| latest | [⚠️ Deprecated](https://github.com/louislam/uptime-kuma/wiki/Migration-From-v1-To-v2) |
| debian | [⚠️ Deprecated](https://github.com/louislam/uptime-kuma/wiki/Migration-From-v1-To-v2) |
| All other tags | ❌ |

View File

@ -1,9 +1,9 @@
services: services:
uptime-kuma: uptime-kuma:
image: louislam/uptime-kuma:2 image: louislam/uptime-kuma:1
restart: unless-stopped
volumes: volumes:
- ./data:/app/data - ./data:/app/data
ports: ports:
# <Host Port>:<Container Port> # <Host Port>:<Container Port>
- "3001:3001" - 3001:3001
restart: unless-stopped

View File

@ -0,0 +1,5 @@
module.exports = {
"rootDir": "..",
"testRegex": "./test/backend.spec.js",
};

View File

@ -22,11 +22,10 @@ export default defineConfig({
// Reporter to use // Reporter to use
reporter: [ reporter: [
[ [
"html", "html", {
{
outputFolder: "../private/playwright-report", outputFolder: "../private/playwright-report",
open: "never", open: "never",
}, }
], ],
], ],
@ -48,7 +47,7 @@ export default defineConfig({
{ {
name: "specs", name: "specs",
use: { ...devices["Desktop Chrome"] }, use: { ...devices["Desktop Chrome"] },
dependencies: ["run-once setup"], dependencies: [ "run-once setup" ],
}, },
/* /*
{ {

View File

@ -2,7 +2,7 @@ import vue from "@vitejs/plugin-vue";
import { defineConfig } from "vite"; import { defineConfig } from "vite";
import visualizer from "rollup-plugin-visualizer"; import visualizer from "rollup-plugin-visualizer";
import viteCompression from "vite-plugin-compression"; import viteCompression from "vite-plugin-compression";
import { VitePWA } from "vite-plugin-pwa"; import VueDevTools from "vite-plugin-vue-devtools";
const postCssScss = require("postcss-scss"); const postCssScss = require("postcss-scss");
const postcssRTLCSS = require("postcss-rtlcss"); const postcssRTLCSS = require("postcss-rtlcss");
@ -15,13 +15,13 @@ export default defineConfig({
port: 3000, port: 3000,
}, },
define: { define: {
FRONTEND_VERSION: JSON.stringify(process.env.npm_package_version), "FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
"process.env": {}, "process.env": {},
}, },
plugins: [ plugins: [
vue(), vue(),
visualizer({ visualizer({
filename: "tmp/dist-stats.html", filename: "tmp/dist-stats.html"
}), }),
viteCompression({ viteCompression({
algorithm: "gzip", algorithm: "gzip",
@ -31,31 +31,25 @@ export default defineConfig({
algorithm: "brotliCompress", algorithm: "brotliCompress",
filter: viteCompressionFilter, filter: viteCompressionFilter,
}), }),
VitePWA({ VueDevTools(),
registerType: null,
srcDir: "src",
filename: "serviceWorker.ts",
strategies: "injectManifest",
injectManifest: {
maximumFileSizeToCacheInBytes: 3 * 1024 * 1024, // 3 MiB
},
}),
], ],
css: { css: {
postcss: { postcss: {
parser: postCssScss, "parser": postCssScss,
map: false, "map": false,
plugins: [postcssRTLCSS], "plugins": [ postcssRTLCSS ]
}, }
}, },
build: { build: {
commonjsOptions: { commonjsOptions: {
include: [/.js$/], include: [ /.js$/ ],
}, },
rollupOptions: { rollupOptions: {
output: { output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {}, manualChunks(id, { getModuleInfo, getModuleIds }) {
},
}
}
}, },
}, }
}); });

View File

@ -39,7 +39,7 @@ async function createTables() {
table.integer("user_id").unsigned().notNullable(); table.integer("user_id").unsigned().notNullable();
table.string("protocol", 10).notNullable(); table.string("protocol", 10).notNullable();
table.string("host", 255).notNullable(); table.string("host", 255).notNullable();
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
table.boolean("auth").notNullable(); table.boolean("auth").notNullable();
table.string("username", 255).nullable(); table.string("username", 255).nullable();
table.string("password", 255).nullable(); table.string("password", 255).nullable();
@ -67,7 +67,10 @@ async function createTables() {
table.increments("id"); table.increments("id");
table.string("name", 150); table.string("name", 150);
table.boolean("active").notNullable().defaultTo(true); table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned().references("id").inTable("user").onDelete("SET NULL").onUpdate("CASCADE"); table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("interval").notNullable().defaultTo(20); table.integer("interval").notNullable().defaultTo(20);
table.text("url"); table.text("url");
table.string("type", 20); table.string("type", 20);
@ -80,7 +83,7 @@ async function createTables() {
table.boolean("ignore_tls").notNullable().defaultTo(false); table.boolean("ignore_tls").notNullable().defaultTo(false);
table.boolean("upside_down").notNullable().defaultTo(false); table.boolean("upside_down").notNullable().defaultTo(false);
table.integer("maxredirects").notNullable().defaultTo(10); table.integer("maxredirects").notNullable().defaultTo(10);
table.text("accepted_statuscodes_json").notNullable().defaultTo('["200-299"]'); table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
table.string("dns_resolve_type", 5); table.string("dns_resolve_type", 5);
table.string("dns_resolve_server", 255); table.string("dns_resolve_server", 255);
table.string("dns_last_result", 255); table.string("dns_last_result", 255);
@ -91,9 +94,11 @@ async function createTables() {
table.text("headers").defaultTo(null); table.text("headers").defaultTo(null);
table.text("basic_auth_user").defaultTo(null); table.text("basic_auth_user").defaultTo(null);
table.text("basic_auth_pass").defaultTo(null); table.text("basic_auth_pass").defaultTo(null);
table.integer("docker_host").unsigned().references("id").inTable("docker_host"); table.integer("docker_host").unsigned()
.references("id").inTable("docker_host");
table.string("docker_container", 255); table.string("docker_container", 255);
table.integer("proxy_id").unsigned().references("id").inTable("proxy"); table.integer("proxy_id").unsigned()
.references("id").inTable("proxy");
table.boolean("expiry_notification").defaultTo(true); table.boolean("expiry_notification").defaultTo(true);
table.text("mqtt_topic"); table.text("mqtt_topic");
table.string("mqtt_success_message", 255); table.string("mqtt_success_message", 255);
@ -125,12 +130,8 @@ async function createTables() {
await knex.schema.createTable("heartbeat", (table) => { await knex.schema.createTable("heartbeat", (table) => {
table.increments("id"); table.increments("id");
table.boolean("important").notNullable().defaultTo(false); table.boolean("important").notNullable().defaultTo(false);
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.smallint("status").notNullable(); table.smallint("status").notNullable();
@ -142,9 +143,9 @@ async function createTables() {
table.integer("down_count").notNullable().defaultTo(0); table.integer("down_count").notNullable().defaultTo(0);
table.index("important"); table.index("important");
table.index(["monitor_id", "time"], "monitor_time_index"); table.index([ "monitor_id", "time" ], "monitor_time_index");
table.index("monitor_id"); table.index("monitor_id");
table.index(["monitor_id", "important", "time"], "monitor_important_time_index"); table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
}); });
// incident // incident
@ -165,7 +166,10 @@ async function createTables() {
table.increments("id"); table.increments("id");
table.string("title", 150).notNullable(); table.string("title", 150).notNullable();
table.text("description").notNullable(); table.text("description").notNullable();
table.integer("user_id").unsigned().references("id").inTable("user").onDelete("SET NULL").onUpdate("CASCADE"); table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.boolean("active").notNullable().defaultTo(true); table.boolean("active").notNullable().defaultTo(true);
table.string("strategy", 50).notNullable().defaultTo("single"); table.string("strategy", 50).notNullable().defaultTo("single");
table.datetime("start_date"); table.datetime("start_date");
@ -177,7 +181,7 @@ async function createTables() {
table.integer("interval_day"); table.integer("interval_day");
table.index("active"); table.index("active");
table.index(["strategy", "active"], "manual_active"); table.index([ "strategy", "active" ], "manual_active");
table.index("user_id", "maintenance_user_id"); table.index("user_id", "maintenance_user_id");
}); });
@ -205,21 +209,13 @@ async function createTables() {
await knex.schema.createTable("maintenance_status_page", (table) => { await knex.schema.createTable("maintenance_status_page", (table) => {
table.increments("id"); table.increments("id");
table table.integer("status_page_id").unsigned().notNullable()
.integer("status_page_id") .references("id").inTable("status_page")
.unsigned()
.notNullable()
.references("id")
.inTable("status_page")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table table.integer("maintenance_id").unsigned().notNullable()
.integer("maintenance_id") .references("id").inTable("maintenance")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
}); });
@ -227,12 +223,8 @@ async function createTables() {
// maintenance_timeslot // maintenance_timeslot
await knex.schema.createTable("maintenance_timeslot", (table) => { await knex.schema.createTable("maintenance_timeslot", (table) => {
table.increments("id"); table.increments("id");
table table.integer("maintenance_id").unsigned().notNullable()
.integer("maintenance_id") .references("id").inTable("maintenance")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.datetime("start_date").notNullable(); table.datetime("start_date").notNullable();
@ -240,51 +232,35 @@ async function createTables() {
table.boolean("generated_next").defaultTo(false); table.boolean("generated_next").defaultTo(false);
table.index("maintenance_id"); table.index("maintenance_id");
table.index(["maintenance_id", "start_date", "end_date"], "active_timeslot_index"); table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
table.index("generated_next", "generated_next_index"); table.index("generated_next", "generated_next_index");
}); });
// monitor_group // monitor_group
await knex.schema.createTable("monitor_group", (table) => { await knex.schema.createTable("monitor_group", (table) => {
table.increments("id"); table.increments("id");
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table table.integer("group_id").unsigned().notNullable()
.integer("group_id") .references("id").inTable("group")
.unsigned()
.notNullable()
.references("id")
.inTable("group")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.integer("weight").notNullable().defaultTo(1000); table.integer("weight").notNullable().defaultTo(1000);
table.boolean("send_url").notNullable().defaultTo(false); table.boolean("send_url").notNullable().defaultTo(false);
table.index(["monitor_id", "group_id"], "fk"); table.index([ "monitor_id", "group_id" ], "fk");
}); });
// monitor_maintenance // monitor_maintenance
await knex.schema.createTable("monitor_maintenance", (table) => { await knex.schema.createTable("monitor_maintenance", (table) => {
table.increments("id"); table.increments("id");
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table table.integer("maintenance_id").unsigned().notNullable()
.integer("maintenance_id") .references("id").inTable("maintenance")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
@ -304,25 +280,17 @@ async function createTables() {
// monitor_notification // monitor_notification
await knex.schema.createTable("monitor_notification", (table) => { await knex.schema.createTable("monitor_notification", (table) => {
table.increments("id").unsigned(); // TODO: no auto increment???? table.increments("id").unsigned(); // TODO: no auto increment????
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table table.integer("notification_id").unsigned().notNullable()
.integer("notification_id") .references("id").inTable("notification")
.unsigned()
.notNullable()
.references("id")
.inTable("notification")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.index(["monitor_id", "notification_id"], "monitor_notification_index"); table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
}); });
// tag // tag
@ -336,20 +304,12 @@ async function createTables() {
// monitor_tag // monitor_tag
await knex.schema.createTable("monitor_tag", (table) => { await knex.schema.createTable("monitor_tag", (table) => {
table.increments("id"); table.increments("id");
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table table.integer("tag_id").unsigned().notNullable()
.integer("tag_id") .references("id").inTable("tag")
.unsigned()
.notNullable()
.references("id")
.inTable("tag")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.text("value"); table.text("value");
@ -358,12 +318,8 @@ async function createTables() {
// monitor_tls_info // monitor_tls_info
await knex.schema.createTable("monitor_tls_info", (table) => { await knex.schema.createTable("monitor_tls_info", (table) => {
table.increments("id"); table.increments("id");
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.text("info_json"); table.text("info_json");
@ -375,8 +331,8 @@ async function createTables() {
table.string("type", 50).notNullable(); table.string("type", 50).notNullable();
table.integer("monitor_id").unsigned().notNullable(); table.integer("monitor_id").unsigned().notNullable();
table.integer("days").notNullable(); table.integer("days").notNullable();
table.unique(["type", "monitor_id", "days"]); table.unique([ "type", "monitor_id", "days" ]);
table.index(["type", "monitor_id", "days"], "good_index"); table.index([ "type", "monitor_id", "days" ], "good_index");
}); });
// setting // setting
@ -390,19 +346,16 @@ async function createTables() {
// status_page_cname // status_page_cname
await knex.schema.createTable("status_page_cname", (table) => { await knex.schema.createTable("status_page_cname", (table) => {
table.increments("id"); table.increments("id");
table table.integer("status_page_id").unsigned()
.integer("status_page_id") .references("id").inTable("status_page")
.unsigned()
.references("id")
.inTable("status_page")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.string("domain").notNullable().unique().collate("utf8_general_ci"); table.string("domain").notNullable().unique().collate("utf8_general_ci");
}); });
/********************* /*********************
* Converted Patch here * Converted Patch here
*********************/ *********************/
// 2023-06-30-1348-http-body-encoding.js // 2023-06-30-1348-http-body-encoding.js
// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25); // ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
@ -443,12 +396,8 @@ async function createTables() {
table.increments("id").primary(); table.increments("id").primary();
table.string("key", 255).notNullable(); table.string("key", 255).notNullable();
table.string("name", 255).notNullable(); table.string("name", 255).notNullable();
table table.integer("user_id").unsigned().notNullable()
.integer("user_id") .references("id").inTable("user")
.unsigned()
.notNullable()
.references("id")
.inTable("user")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable(); table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
@ -481,11 +430,13 @@ async function createTables() {
ALTER TABLE maintenance ADD timezone VARCHAR(255); ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER; ALTER TABLE maintenance ADD duration INTEGER;
*/ */
await knex.schema.dropTableIfExists("maintenance_timeslot").table("maintenance", function (table) { await knex.schema
table.text("cron"); .dropTableIfExists("maintenance_timeslot")
table.string("timezone", 255); .table("maintenance", function (table) {
table.integer("duration"); table.text("cron");
}); table.string("timezone", 255);
table.integer("duration");
});
// 2023-06-30-1413-add-parent-monitor.js. // 2023-06-30-1413-add-parent-monitor.js.
/* /*
@ -493,7 +444,10 @@ async function createTables() {
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE; ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
*/ */
await knex.schema.table("monitor", function (table) { await knex.schema.table("monitor", function (table) {
table.integer("parent").unsigned().references("id").inTable("monitor").onDelete("SET NULL").onUpdate("CASCADE"); table.integer("parent").unsigned()
.references("id").inTable("monitor")
.onDelete("SET NULL")
.onUpdate("CASCADE");
}); });
/* /*

View File

@ -3,41 +3,39 @@ exports.up = function (knex) {
.createTable("stat_minutely", function (table) { .createTable("stat_minutely", function (table) {
table.increments("id"); table.increments("id");
table.comment("This table contains the minutely aggregate statistics for each monitor"); table.comment("This table contains the minutely aggregate statistics for each monitor");
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest minute"); table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest minute");
table.float("ping").notNullable().comment("Average ping in milliseconds"); table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable(); table.smallint("up").notNullable();
table.smallint("down").notNullable(); table.smallint("down").notNullable();
table.unique(["monitor_id", "timestamp"]); table.unique([ "monitor_id", "timestamp" ]);
}) })
.createTable("stat_daily", function (table) { .createTable("stat_daily", function (table) {
table.increments("id"); table.increments("id");
table.comment("This table contains the daily aggregate statistics for each monitor"); table.comment("This table contains the daily aggregate statistics for each monitor");
table table.integer("monitor_id").unsigned().notNullable()
.integer("monitor_id") .references("id").inTable("monitor")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE") .onDelete("CASCADE")
.onUpdate("CASCADE"); .onUpdate("CASCADE");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest day"); table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest day");
table.float("ping").notNullable().comment("Average ping in milliseconds"); table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable(); table.smallint("up").notNullable();
table.smallint("down").notNullable(); table.smallint("down").notNullable();
table.unique(["monitor_id", "timestamp"]); table.unique([ "monitor_id", "timestamp" ]);
}); });
}; };
exports.down = function (knex) { exports.down = function (knex) {
return knex.schema.dropTable("stat_minutely").dropTable("stat_daily"); return knex.schema
.dropTable("stat_minutely")
.dropTable("stat_daily");
}; };

View File

@ -1,13 +1,16 @@
exports.up = function (knex) { exports.up = function (knex) {
// Add new column heartbeat.end_time // Add new column heartbeat.end_time
return knex.schema.alterTable("heartbeat", function (table) { return knex.schema
table.datetime("end_time").nullable().defaultTo(null); .alterTable("heartbeat", function (table) {
}); table.datetime("end_time").nullable().defaultTo(null);
});
}; };
exports.down = function (knex) { exports.down = function (knex) {
// Rename heartbeat.start_time to heartbeat.time // Rename heartbeat.start_time to heartbeat.time
return knex.schema.alterTable("heartbeat", function (table) { return knex.schema
table.dropColumn("end_time"); .alterTable("heartbeat", function (table) {
}); table.dropColumn("end_time");
});
}; };

View File

@ -1,12 +1,15 @@
exports.up = function (knex) { exports.up = function (knex) {
// Add new column heartbeat.retries // Add new column heartbeat.retries
return knex.schema.alterTable("heartbeat", function (table) { return knex.schema
table.integer("retries").notNullable().defaultTo(0); .alterTable("heartbeat", function (table) {
}); table.integer("retries").notNullable().defaultTo(0);
});
}; };
exports.down = function (knex) { exports.down = function (knex) {
return knex.schema.alterTable("heartbeat", function (table) { return knex.schema
table.dropColumn("retries"); .alterTable("heartbeat", function (table) {
}); table.dropColumn("retries");
});
}; };

View File

@ -1,13 +1,16 @@
exports.up = function (knex) { exports.up = function (knex) {
// Add new column monitor.mqtt_check_type // Add new column monitor.mqtt_check_type
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword"); .alterTable("monitor", function (table) {
}); table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
});
}; };
exports.down = function (knex) { exports.down = function (knex) {
// Drop column monitor.mqtt_check_type // Drop column monitor.mqtt_check_type
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.dropColumn("mqtt_check_type"); .alterTable("monitor", function (table) {
}); table.dropColumn("mqtt_check_type");
});
}; };

View File

@ -1,12 +1,14 @@
exports.up = function (knex) { exports.up = function (knex) {
// update monitor.push_token to 32 length // update monitor.push_token to 32 length
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.string("push_token", 32).alter(); .alterTable("monitor", function (table) {
}); table.string("push_token", 32).alter();
});
}; };
exports.down = function (knex) { exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.string("push_token", 20).alter(); .alterTable("monitor", function (table) {
}); table.string("push_token", 20).alter();
});
}; };

View File

@ -5,14 +5,9 @@ exports.up = function (knex) {
table.string("name", 255).notNullable(); table.string("name", 255).notNullable();
table.string("url", 255).notNullable(); table.string("url", 255).notNullable();
table.integer("user_id").unsigned(); table.integer("user_id").unsigned();
}) }).alterTable("monitor", function (table) {
.alterTable("monitor", function (table) {
// Add new column monitor.remote_browser // Add new column monitor.remote_browser
table table.integer("remote_browser").nullable().defaultTo(null).unsigned()
.integer("remote_browser")
.nullable()
.defaultTo(null)
.unsigned()
.index() .index()
.references("id") .references("id")
.inTable("remote_browser"); .inTable("remote_browser");

View File

@ -1,7 +1,8 @@
exports.up = function (knex) { exports.up = function (knex) {
return knex.schema.alterTable("status_page", function (table) { return knex.schema
table.integer("auto_refresh_interval").defaultTo(300).unsigned(); .alterTable("status_page", function (table) {
}); table.integer("auto_refresh_interval").defaultTo(300).unsigned();
});
}; };
exports.down = function (knex) { exports.down = function (knex) {

View File

@ -1,29 +1,14 @@
exports.up = function (knex) { exports.up = function (knex) {
return knex.schema return knex.schema
.alterTable("stat_daily", function (table) { .alterTable("stat_daily", function (table) {
table table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
.float("ping_min") table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
.notNullable()
.defaultTo(0)
.comment("Minimum ping during this period in milliseconds");
table
.float("ping_max")
.notNullable()
.defaultTo(0)
.comment("Maximum ping during this period in milliseconds");
}) })
.alterTable("stat_minutely", function (table) { .alterTable("stat_minutely", function (table) {
table table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
.float("ping_min") table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
.notNullable()
.defaultTo(0)
.comment("Minimum ping during this period in milliseconds");
table
.float("ping_max")
.notNullable()
.defaultTo(0)
.comment("Maximum ping during this period in milliseconds");
}); });
}; };
exports.down = function (knex) { exports.down = function (knex) {

View File

@ -1,26 +1,26 @@
exports.up = function (knex) { exports.up = function (knex) {
return knex.schema.createTable("stat_hourly", function (table) { return knex.schema
table.increments("id"); .createTable("stat_hourly", function (table) {
table.comment("This table contains the hourly aggregate statistics for each monitor"); table.increments("id");
table table.comment("This table contains the hourly aggregate statistics for each monitor");
.integer("monitor_id") table.integer("monitor_id").unsigned().notNullable()
.unsigned() .references("id").inTable("monitor")
.notNullable() .onDelete("CASCADE")
.references("id") .onUpdate("CASCADE");
.inTable("monitor") table.integer("timestamp")
.onDelete("CASCADE") .notNullable()
.onUpdate("CASCADE"); .comment("Unix timestamp rounded down to the nearest hour");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest hour"); table.float("ping").notNullable().comment("Average ping in milliseconds");
table.float("ping").notNullable().comment("Average ping in milliseconds"); table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds"); table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds"); table.smallint("up").notNullable();
table.smallint("up").notNullable(); table.smallint("down").notNullable();
table.smallint("down").notNullable();
table.unique(["monitor_id", "timestamp"]); table.unique([ "monitor_id", "timestamp" ]);
}); });
}; };
exports.down = function (knex) { exports.down = function (knex) {
return knex.schema.dropTable("stat_hourly"); return knex.schema
.dropTable("stat_hourly");
}; };

View File

@ -9,6 +9,7 @@ exports.up = function (knex) {
.alterTable("stat_hourly", function (table) { .alterTable("stat_hourly", function (table) {
table.text("extras").defaultTo(null).comment("Extra statistics during this time period"); table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
}); });
}; };
exports.down = function (knex) { exports.down = function (knex) {

View File

@ -1,9 +1,10 @@
exports.up = function (knex) { exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.string("snmp_oid").defaultTo(null); .alterTable("monitor", function (table) {
table.enum("snmp_version", ["1", "2c", "3"]).defaultTo("2c"); table.string("snmp_oid").defaultTo(null);
table.string("json_path_operator").defaultTo(null); table.enum("snmp_version", [ "1", "2c", "3" ]).defaultTo("2c");
}); table.string("json_path_operator").defaultTo(null);
});
}; };
exports.down = function (knex) { exports.down = function (knex) {

View File

@ -1,11 +1,13 @@
exports.up = function (knex) { exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.boolean("cache_bust").notNullable().defaultTo(false); .alterTable("monitor", function (table) {
}); table.boolean("cache_bust").notNullable().defaultTo(false);
});
}; };
exports.down = function (knex) { exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.dropColumn("cache_bust"); .alterTable("monitor", function (table) {
}); table.dropColumn("cache_bust");
});
}; };

View File

@ -1,7 +1,8 @@
exports.up = function (knex) { exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) { return knex.schema
table.text("conditions").notNullable().defaultTo("[]"); .alterTable("monitor", function (table) {
}); table.text("conditions").notNullable().defaultTo("[]");
});
}; };
exports.down = function (knex) { exports.down = function (knex) {

View File

@ -1,15 +0,0 @@
// Add the RabbitMQ monitor configuration columns to the monitor table.
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.text("rabbitmq_nodes");
        table.string("rabbitmq_username");
        table.string("rabbitmq_password");
    });

// Remove the RabbitMQ monitor configuration columns again.
exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("rabbitmq_nodes");
        table.dropColumn("rabbitmq_username");
        table.dropColumn("rabbitmq_password");
    });

View File

@ -1,7 +0,0 @@
exports.up = function (knex) {
return knex("monitor").whereNull("json_path_operator").update("json_path_operator", "==");
};
exports.down = function (knex) {
// changing the json_path_operator back to null for all "==" is not possible anymore
// we have lost the context which fields have been set explicitely in >= v2.0 and which would need to be reverted
};

View File

@ -1,12 +0,0 @@
// Update info_json column to LONGTEXT mainly for MariaDB
exports.up = function (knex) {
return knex.schema.alterTable("monitor_tls_info", function (table) {
table.text("info_json", "longtext").alter();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor_tls_info", function (table) {
table.text("info_json", "text").alter();
});
};

View File

@ -1,11 +0,0 @@
// Add the monitor.smtp_security column (security mode for SMTP monitors).
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("smtp_security").defaultTo(null);
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("smtp_security");
    });

View File

@ -1,14 +0,0 @@
// Add the WebSocket monitor options: a flag to skip validation of the
// Sec-WebSocket-Accept header, and an optional subprotocol string.
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.boolean("ws_ignore_sec_websocket_accept_header").notNullable().defaultTo(false);
        table.string("ws_subprotocol", 255).notNullable().defaultTo("");
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("ws_ignore_sec_websocket_accept_header");
        table.dropColumn("ws_subprotocol");
    });

View File

@ -1,23 +0,0 @@
// Udpate status_page table to generalize analytics fields
exports.up = function (knex) {
return knex.schema
.alterTable("status_page", function (table) {
table.renameColumn("google_analytics_tag_id", "analytics_id");
table.string("analytics_script_url");
table.enu("analytics_type", ["google", "umami", "plausible", "matomo"]).defaultTo(null);
})
.then(() => {
// After a succesful migration, add google as default for previous pages
knex("status_page").whereNotNull("analytics_id").update({
analytics_type: "google",
});
});
};
exports.down = function (knex) {
return knex.schema.alterTable("status_page", function (table) {
table.renameColumn("analytics_id", "google_analytics_tag_id");
table.dropColumn("analytics_script_url");
table.dropColumn("analytics_type");
});
};

View File

@ -1,21 +0,0 @@
// Add ping tuning columns to the monitor table. SQL equivalent:
//   ALTER TABLE monitor ADD ping_count INTEGER default 1 not null;
//   ALTER TABLE monitor ADD ping_numeric BOOLEAN default true not null;
//   ALTER TABLE monitor ADD ping_per_request_timeout INTEGER default 2 not null;
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.integer("ping_count").defaultTo(1).notNullable();
        table.boolean("ping_numeric").defaultTo(true).notNullable();
        table.integer("ping_per_request_timeout").defaultTo(2).notNullable();
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("ping_count");
        table.dropColumn("ping_numeric");
        table.dropColumn("ping_per_request_timeout");
    });

View File

@ -1,12 +0,0 @@
// Fix #5721: widen proxy.port from SMALLINT to INTEGER so port numbers above
// the signed 16-bit range can be stored.
exports.up = (knex) =>
    knex.schema.alterTable("proxy", (table) => {
        table.integer("port").alter();
    });

exports.down = (knex) =>
    knex.schema.alterTable("proxy", (table) => {
        table.smallint("port").alter();
    });

View File

@ -1,12 +0,0 @@
// Add the custom_url column to the monitor_group table.
exports.up = (knex) =>
    knex.schema.alterTable("monitor_group", (table) => {
        table.text("custom_url", "text");
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor_group", (table) => {
        table.dropColumn("custom_url");
    });

View File

@ -1,11 +0,0 @@
// Add the monitor.ip_family column (later migrated to a VARCHAR; see the
// follow-up migration that changes its type).
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.boolean("ip_family").defaultTo(null);
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("ip_family");
    });

View File

@ -1,11 +0,0 @@
// Add the monitor.manual_status column (status value for manually controlled monitors).
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("manual_status").defaultTo(null);
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("manual_status");
    });

View File

@ -1,33 +0,0 @@
// Add column last_start_date to maintenance table
exports.up = async function (knex) {
await knex.schema.alterTable("maintenance", function (table) {
table.datetime("last_start_date");
});
// Perform migration for recurring-interval strategy
const recurringMaintenances = await knex("maintenance")
.where({
strategy: "recurring-interval",
cron: "* * * * *",
})
.select("id", "start_time");
// eslint-disable-next-line camelcase
const maintenanceUpdates = recurringMaintenances.map(async ({ start_time, id }) => {
// eslint-disable-next-line camelcase
const [hourStr, minuteStr] = start_time.split(":");
const hour = parseInt(hourStr, 10);
const minute = parseInt(minuteStr, 10);
const cron = `${minute} ${hour} * * *`;
await knex("maintenance").where({ id }).update({ cron });
});
await Promise.all(maintenanceUpdates);
};
exports.down = function (knex) {
return knex.schema.alterTable("maintenance", function (table) {
table.dropColumn("last_start_date");
});
};

View File

@ -1,12 +0,0 @@
// Fix: change monitor.manual_status from VARCHAR to SMALLINT so it matches
// the numeric status codes used elsewhere.
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.smallint("manual_status").alter();
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("manual_status").alter();
    });

View File

@ -1,11 +0,0 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("oauth_audience").nullable().defaultTo(null);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("oauth_audience").alter();
});
};

View File

@ -1,13 +0,0 @@
// Add the monitor.mqtt_websocket_path column (path used for MQTT-over-WebSocket).
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("mqtt_websocket_path", 255).nullable();
    });

// Drop the monitor.mqtt_websocket_path column again.
exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("mqtt_websocket_path");
    });

View File

@ -1,23 +0,0 @@
exports.up = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
table.boolean("domain_expiry_notification").defaultTo(1);
})
.createTable("domain_expiry", (table) => {
table.increments("id");
table.datetime("last_check");
// Use VARCHAR(255) for MySQL/MariaDB compatibility with unique constraint
// Maximum domain name length is 253 characters (255 octets on the wire)
table.string("domain", 255).unique().notNullable();
table.datetime("expiry");
table.integer("last_expiry_notification_sent").defaultTo(null);
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("monitor", function (table) {
table.boolean("domain_expiry_notification").alter();
})
.dropTable("domain_expiry");
};

View File

@ -1,14 +0,0 @@
// Fix monitor.ip_family: store it as a short VARCHAR instead of a boolean.
// Possible values are "ipv4" and "ipv6".
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("ip_family", 4).defaultTo(null).alter();
    });

// Rollback to the original boolean column type.
exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.boolean("ip_family").defaultTo(null).alter();
    });

View File

@ -1,27 +0,0 @@
// Fix for #4315. Logically, setting it to 0 ping may not be correct, but it is better than throwing errors
exports.up = function (knex) {
return knex.schema
.alterTable("stat_daily", function (table) {
table.integer("ping").defaultTo(0).alter();
})
.alterTable("stat_hourly", function (table) {
table.integer("ping").defaultTo(0).alter();
})
.alterTable("stat_minutely", function (table) {
table.integer("ping").defaultTo(0).alter();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("stat_daily", function (table) {
table.integer("ping").alter();
})
.alterTable("stat_hourly", function (table) {
table.integer("ping").alter();
})
.alterTable("stat_minutely", function (table) {
table.integer("ping").alter();
});
};

View File

@ -1,15 +0,0 @@
// Add the response-saving options to the monitor table.
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.boolean("save_response").notNullable().defaultTo(false);
        table.boolean("save_error_response").notNullable().defaultTo(true);
        // Default 1KB
        table.integer("response_max_length").notNullable().defaultTo(1024);
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("save_response");
        table.dropColumn("save_error_response");
        table.dropColumn("response_max_length");
    });

View File

@ -1,11 +0,0 @@
// Add the heartbeat.response column for storing captured response bodies.
exports.up = (knex) =>
    knex.schema.alterTable("heartbeat", (table) => {
        table.text("response").nullable().defaultTo(null);
    });

exports.down = (knex) =>
    knex.schema.alterTable("heartbeat", (table) => {
        table.dropColumn("response");
    });

View File

@ -1,13 +0,0 @@
// Add new column status_page.show_only_last_heartbeat
exports.up = (knex) =>
    knex.schema.alterTable("status_page", (table) => {
        table.boolean("show_only_last_heartbeat").notNullable().defaultTo(false);
    });

// Drop column status_page.show_only_last_heartbeat
exports.down = (knex) =>
    knex.schema.alterTable("status_page", (table) => {
        table.dropColumn("show_only_last_heartbeat");
    });

View File

@ -1,19 +0,0 @@
/**
* @param {import("knex").Knex} knex The Knex.js instance for database interaction.
* @returns {Promise<void>}
*/
exports.up = async (knex) => {
await knex.schema.alterTable("monitor", (table) => {
table.string("system_service_name");
});
};
/**
* @param {import("knex").Knex} knex The Knex.js instance for database interaction.
* @returns {Promise<void>}
*/
exports.down = async (knex) => {
await knex.schema.alterTable("monitor", (table) => {
table.dropColumn("system_service_name");
});
};

View File

@ -1,37 +0,0 @@
exports.up = async function (knex) {
const isSQLite = knex.client.dialect === "sqlite3";
if (isSQLite) {
// For SQLite: Use partial indexes with WHERE important = 1
// Drop existing indexes using IF EXISTS
await knex.raw("DROP INDEX IF EXISTS monitor_important_time_index");
await knex.raw("DROP INDEX IF EXISTS heartbeat_important_index");
// Create partial indexes with predicate
await knex.schema.alterTable("heartbeat", function (table) {
table.index(["monitor_id", "time"], "monitor_important_time_index", {
predicate: knex.whereRaw("important = 1"),
});
table.index(["important"], "heartbeat_important_index", {
predicate: knex.whereRaw("important = 1"),
});
});
}
// For MariaDB/MySQL: No changes (partial indexes not supported)
};
exports.down = async function (knex) {
const isSQLite = knex.client.dialect === "sqlite3";
if (isSQLite) {
// Restore original indexes
await knex.raw("DROP INDEX IF EXISTS monitor_important_time_index");
await knex.raw("DROP INDEX IF EXISTS heartbeat_important_index");
await knex.schema.alterTable("heartbeat", function (table) {
table.index(["monitor_id", "important", "time"], "monitor_important_time_index");
table.index(["important"]);
});
}
// For MariaDB/MySQL: No changes
};

View File

@ -1,30 +0,0 @@
exports.up = async function (knex) {
const notifications = await knex("notification").select("id", "config");
const lineNotifyIDs = [];
for (const { id, config } of notifications) {
try {
const parsedConfig = JSON.parse(config || "{}");
const type = typeof parsedConfig.type === "string" ? parsedConfig.type.toLowerCase() : "";
if (type === "linenotify" || type === "line-notify") {
lineNotifyIDs.push(id);
}
} catch (error) {
// Ignore invalid JSON blobs here; they are handled elsewhere in the app.
}
}
if (lineNotifyIDs.length === 0) {
return;
}
await knex.transaction(async (trx) => {
await trx("monitor_notification").whereIn("notification_id", lineNotifyIDs).del();
await trx("notification").whereIn("id", lineNotifyIDs).del();
});
};
exports.down = async function () {
// Removal of LINE Notify configs is not reversible.
};

View File

@ -1,11 +0,0 @@
exports.up = async function (knex) {
await knex.schema.alterTable("monitor", (table) => {
table.string("snmp_v3_username", 255);
});
};
exports.down = async function (knex) {
await knex.schema.alterTable("monitor", (table) => {
table.dropColumn("snmp_v3_username");
});
};

View File

@ -1,12 +0,0 @@
// Change dns_last_result column from VARCHAR(255) to TEXT to handle longer DNS TXT records
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.text("dns_last_result").alter();
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("dns_last_result", 255).alter();
    });

View File

@ -1,186 +0,0 @@
// Migration to update monitor.game from GameDig v4 to v5 game IDs
// Reference: https://github.com/gamedig/node-gamedig/blob/master/MIGRATE_IDS.md

// Lookup table mapping v4 game IDs (keys) to v5 game IDs (values).
// Keys that are not valid identifiers (e.g. "7d2d") must stay quoted.
// The down migration inverts this map, so the mapping must stay one-to-one.
const gameDig4to5IdMap = {
    americasarmypg: "aapg",
    "7d2d": "sdtd",
    as: "actionsource",
    ageofchivalry: "aoc",
    arkse: "ase",
    arcasimracing: "asr08",
    arma: "aaa",
    arma2oa: "a2oa",
    armacwa: "acwa",
    armar: "armaresistance",
    armare: "armareforger",
    armagetron: "armagetronadvanced",
    bat1944: "battalion1944",
    bf1942: "battlefield1942",
    bfv: "battlefieldvietnam",
    bf2: "battlefield2",
    bf2142: "battlefield2142",
    bfbc2: "bbc2",
    bf3: "battlefield3",
    bf4: "battlefield4",
    bfh: "battlefieldhardline",
    bd: "basedefense",
    bs: "bladesymphony",
    buildandshoot: "bas",
    cod4: "cod4mw",
    callofjuarez: "coj",
    chivalry: "cmw",
    commandos3: "c3db",
    cacrenegade: "cacr",
    contactjack: "contractjack",
    cs15: "counterstrike15",
    cs16: "counterstrike16",
    cs2: "counterstrike2",
    crossracing: "crce",
    darkesthour: "dhe4445",
    daysofwar: "dow",
    deadlydozenpt: "ddpt",
    dh2005: "deerhunter2005",
    dinodday: "ddd",
    dirttrackracing2: "dtr2",
    dmc: "deathmatchclassic",
    dnl: "dal",
    drakan: "dootf",
    dys: "dystopia",
    em: "empiresmod",
    empyrion: "egs",
    f12002: "formulaone2002",
    flashpointresistance: "ofr",
    fivem: "gta5f",
    forrest: "theforrest",
    graw: "tcgraw",
    graw2: "tcgraw2",
    giantscitizenkabuto: "gck",
    ges: "goldeneyesource",
    gore: "gus",
    hldm: "hld",
    hldms: "hlds",
    hlopfor: "hlof",
    hl2dm: "hl2d",
    hidden: "thehidden",
    had2: "hiddendangerous2",
    igi2: "i2cs",
    il2: "il2sturmovik",
    insurgencymic: "imic",
    isle: "theisle",
    jamesbondnightfire: "jb007n",
    jc2mp: "jc2m",
    jc3mp: "jc3m",
    kingpin: "kloc",
    kisspc: "kpctnc",
    kspdmp: "kspd",
    kzmod: "kreedzclimbing",
    left4dead: "l4d",
    left4dead2: "l4d2",
    m2mp: "m2m",
    mohsh: "mohaas",
    mohbt: "mohaab",
    mohab: "moha",
    moh2010: "moh",
    mohwf: "mohw",
    minecraftbe: "mbe",
    mtavc: "gtavcmta",
    mtasa: "gtasamta",
    ns: "naturalselection",
    ns2: "naturalselection2",
    nwn: "neverwinternights",
    nwn2: "neverwinternights2",
    nolf: "tonolf",
    nolf2: "nolf2asihw",
    pvkii: "pvak2",
    ps: "postscriptum",
    primalcarnage: "pce",
    pc: "projectcars",
    pc2: "projectcars2",
    prbf2: "prb2",
    przomboid: "projectzomboid",
    quake1: "quake",
    quake3: "q3a",
    ragdollkungfu: "rdkf",
    r6: "rainbowsix",
    r6roguespear: "rs2rs",
    r6ravenshield: "rs3rs",
    redorchestraost: "roo4145",
    redm: "rdr2r",
    riseofnations: "ron",
    rs2: "rs2v",
    samp: "gtasam",
    saomp: "gtasao",
    savage2: "s2ats",
    ss: "serioussam",
    ss2: "serioussam2",
    ship: "theship",
    sinep: "sinepisodes",
    sonsoftheforest: "sotf",
    swbf: "swb",
    swbf2: "swb2",
    swjk: "swjkja",
    swjk2: "swjk2jo",
    takeonhelicopters: "toh",
    tf2: "teamfortress2",
    terraria: "terrariatshock",
    tribes1: "t1s",
    ut: "unrealtournament",
    ut2003: "unrealtournament2003",
    ut2004: "unrealtournament2004",
    ut3: "unrealtournament3",
    v8supercar: "v8sc",
    vcmp: "vcm",
    vs: "vampireslayer",
    wheeloftime: "wot",
    wolfenstein2009: "wolfenstein",
    wolfensteinet: "wet",
    wurm: "wurmunlimited",
};
/**
* Migrate game IDs from v4 to v5
* @param {import("knex").Knex} knex - Knex instance
* @returns {Promise<void>}
*/
exports.up = async function (knex) {
await knex.transaction(async (trx) => {
// Get all monitors that use the gamedig type
const monitors = await trx("monitor").select("id", "game").where("type", "gamedig").whereNotNull("game");
// Update each monitor with the new game ID if it needs migration
for (const monitor of monitors) {
const oldGameId = monitor.game;
const newGameId = gameDig4to5IdMap[oldGameId];
if (newGameId) {
await trx("monitor").where("id", monitor.id).update({ game: newGameId });
}
}
});
};
/**
* Revert game IDs from v5 back to v4
* @param {import("knex").Knex} knex - Knex instance
* @returns {Promise<void>}
*/
exports.down = async function (knex) {
// Create reverse mapping from the same LUT
const gameDig5to4IdMap = Object.fromEntries(Object.entries(gameDig4to5IdMap).map(([v4, v5]) => [v5, v4]));
await knex.transaction(async (trx) => {
// Get all monitors that use the gamedig type
const monitors = await trx("monitor").select("id", "game").where("type", "gamedig").whereNotNull("game");
// Revert each monitor back to the old game ID if it was migrated
for (const monitor of monitors) {
const newGameId = monitor.game;
const oldGameId = gameDig5to4IdMap[newGameId];
if (oldGameId) {
await trx("monitor").where("id", monitor.id).update({ game: oldGameId });
}
}
});
};

View File

@ -1,11 +0,0 @@
exports.up = async function (knex) {
await knex.schema.alterTable("status_page", function (table) {
table.string("rss_title", 255);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("status_page", function (table) {
table.dropColumn("rss_title");
});
};

View File

@ -1,11 +0,0 @@
// Add the monitor.expected_tls_alert column (expected TLS alert description).
exports.up = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.string("expected_tls_alert", 50).defaultTo(null);
    });

exports.down = (knex) =>
    knex.schema.alterTable("monitor", (table) => {
        table.dropColumn("expected_tls_alert");
    });

View File

@ -1,14 +0,0 @@
// Ensure domain column is VARCHAR(255) across all database types.
// This migration ensures MySQL, SQLite, and MariaDB have consistent column type,
// even if a user installed 2.1.0-beta.0 or 2.1.0-beta.1 which had TEXT type for this column.
// Maximum domain name length is 253 characters (255 octets on the wire).
// Note: The unique constraint is already present from the original migration.
exports.up = function (knex) {
return knex.schema.alterTable("domain_expiry", function (table) {
table.string("domain", 255).notNullable().alter();
});
};
exports.down = function (knex) {
// No rollback needed - keeping VARCHAR(255) is the correct state
};

View File

@ -1,43 +0,0 @@
exports.up = function (knex) {
return knex.schema
.alterTable("heartbeat", function (table) {
table.bigInteger("ping").alter();
})
.alterTable("stat_minutely", function (table) {
table.float("ping", 20, 2).notNullable().alter();
table.float("ping_min", 20, 2).notNullable().defaultTo(0).alter();
table.float("ping_max", 20, 2).notNullable().defaultTo(0).alter();
})
.alterTable("stat_daily", function (table) {
table.float("ping", 20, 2).notNullable().alter();
table.float("ping_min", 20, 2).notNullable().defaultTo(0).alter();
table.float("ping_max", 20, 2).notNullable().defaultTo(0).alter();
})
.alterTable("stat_hourly", function (table) {
table.float("ping", 20, 2).notNullable().alter();
table.float("ping_min", 20, 2).notNullable().defaultTo(0).alter();
table.float("ping_max", 20, 2).notNullable().defaultTo(0).alter();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("heartbeat", function (table) {
table.integer("ping").alter();
})
.alterTable("stat_minutely", function (table) {
table.float("ping").notNullable().alter();
table.float("ping_min").notNullable().defaultTo(0).alter();
table.float("ping_max").notNullable().defaultTo(0).alter();
})
.alterTable("stat_daily", function (table) {
table.float("ping").notNullable().alter();
table.float("ping_min").notNullable().defaultTo(0).alter();
table.float("ping_max").notNullable().defaultTo(0).alter();
})
.alterTable("stat_hourly", function (table) {
table.float("ping").notNullable().alter();
table.float("ping_min").notNullable().defaultTo(0).alter();
table.float("ping_max").notNullable().defaultTo(0).alter();
});
};

View File

@ -1,12 +0,0 @@
exports.up = function (knex) {
// Add new column to table monitor for json-query retry behavior
return knex.schema.alterTable("monitor", function (table) {
table.boolean("retry_only_on_status_code_failure").defaultTo(false).notNullable();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("retry_only_on_status_code_failure");
});
};

View File

@ -1,11 +0,0 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.integer("screenshot_delay").notNullable().unsigned().defaultTo(0);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("screenshot_delay");
});
};

View File

@ -11,9 +11,13 @@ https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Template ## Template
```js ```js
exports.up = function (knex) {}; exports.up = function(knex) {
exports.down = function (knex) {}; };
exports.down = function(knex) {
};
// exports.config = { transaction: false }; // exports.config = { transaction: false };
``` ```
@ -23,28 +27,29 @@ exports.down = function (knex) {};
Filename: 2023-06-30-1348-create-user-and-product.js Filename: 2023-06-30-1348-create-user-and-product.js
```js ```js
exports.up = function (knex) { exports.up = function(knex) {
return knex.schema return knex.schema
.createTable("user", function (table) { .createTable('user', function (table) {
table.increments("id"); table.increments('id');
table.string("first_name", 255).notNullable(); table.string('first_name', 255).notNullable();
table.string("last_name", 255).notNullable(); table.string('last_name', 255).notNullable();
}) })
.createTable("product", function (table) { .createTable('product', function (table) {
table.increments("id"); table.increments('id');
table.decimal("price").notNullable(); table.decimal('price').notNullable();
table.string("name", 1000).notNullable(); table.string('name', 1000).notNullable();
}) }).then(() => {
.then(() => { knex("products").insert([
knex("products").insert([ { price: 10, name: "Apple" },
{ price: 10, name: "Apple" }, { price: 20, name: "Orange" },
{ price: 20, name: "Orange" }, ]);
]);
}); });
}; };
exports.down = function (knex) { exports.down = function(knex) {
return knex.schema.dropTable("product").dropTable("user"); return knex.schema
.dropTable("product")
.dropTable("user");
}; };
``` ```

View File

@ -2,17 +2,11 @@
# Build in Golang # Build in Golang
# Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck # Run npm run build-healthcheck-armv7 in the host first, another it will be super slow where it is building the armv7 healthcheck
############################################ ############################################
FROM golang:1-buster FROM golang:1.19-buster
WORKDIR /app WORKDIR /app
ARG TARGETPLATFORM ARG TARGETPLATFORM
COPY ./extra/ ./extra/ COPY ./extra/ ./extra/
## Switch to archive.debian.org
RUN sed -i '/^deb/s/^/#/' /etc/apt/sources.list \
&& echo "deb http://archive.debian.org/debian buster main contrib non-free" | tee -a /etc/apt/sources.list \
&& echo "deb http://archive.debian.org/debian-security buster/updates main contrib non-free" | tee -a /etc/apt/sources.list \
&& echo "deb http://archive.debian.org/debian buster-updates main contrib non-free" | tee -a /etc/apt/sources.list
# Compile healthcheck.go # Compile healthcheck.go
RUN apt update && \ RUN apt update && \
apt --yes --no-install-recommends install curl && \ apt --yes --no-install-recommends install curl && \

View File

@ -1,15 +1,5 @@
# Download Apprise deb package
FROM node:22-bookworm-slim AS download-apprise
WORKDIR /app
COPY ./extra/download-apprise.mjs ./download-apprise.mjs
RUN apt update && \
apt --yes --no-install-recommends install curl && \
npm install cheerio semver && \
node ./download-apprise.mjs
# Base Image (Slim)
# If the image changed, the second stage image should be changed too # If the image changed, the second stage image should be changed too
FROM node:22-bookworm-slim AS base2-slim FROM node:20-bookworm-slim AS base2-slim
ARG TARGETPLATFORM ARG TARGETPLATFORM
# Specify --no-install-recommends to skip unused dependencies, make the base much smaller! # Specify --no-install-recommends to skip unused dependencies, make the base much smaller!
@ -37,9 +27,8 @@ RUN apt update && \
# apprise = for notifications (Install from the deb package, as the stable one is too old) (workaround for #4867) # apprise = for notifications (Install from the deb package, as the stable one is too old) (workaround for #4867)
# Switching to testing repo is no longer working, as the testing repo is not bookworm anymore. # Switching to testing repo is no longer working, as the testing repo is not bookworm anymore.
# python3-paho-mqtt (#4859) # python3-paho-mqtt (#4859)
# TODO: no idea how to delete the deb file after installation as it becomes a layer already RUN curl http://ftp.debian.org/debian/pool/main/a/apprise/apprise_1.8.0-2_all.deb --output apprise.deb && \
COPY --from=download-apprise /app/apprise.deb ./apprise.deb apt update && \
RUN apt update && \
apt --yes --no-install-recommends install ./apprise.deb python3-paho-mqtt && \ apt --yes --no-install-recommends install ./apprise.deb python3-paho-mqtt && \
rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \
rm -f apprise.deb && \ rm -f apprise.deb && \
@ -47,9 +36,9 @@ RUN apt update && \
# Install cloudflared # Install cloudflared
RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \ RUN curl https://pkg.cloudflare.com/cloudflare-main.gpg --output /usr/share/keyrings/cloudflare-main.gpg && \
echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bookworm main' | tee /etc/apt/sources.list.d/cloudflared.list && \ echo 'deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared bullseye main' | tee /etc/apt/sources.list.d/cloudflared.list && \
apt update && \ apt update && \
apt install --yes --no-install-recommends cloudflared && \ apt install --yes --no-install-recommends -t stable cloudflared && \
cloudflared version && \ cloudflared version && \
rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \
apt --yes autoremove apt --yes autoremove

View File

@ -1,4 +1,4 @@
version: "3.8" version: '3.8'
services: services:
uptime-kuma: uptime-kuma:
@ -9,5 +9,6 @@ services:
- ../server:/app/server - ../server:/app/server
- ../db:/app/db - ../db:/app/db
ports: ports:
- "3001:3001" # <Host Port>:<Container Port> - "3001:3001" # <Host Port>:<Container Port>
- "3307:3306" - "3307:3306"

View File

@ -27,6 +27,7 @@ RUN mkdir ./data
# ⭐ Main Image # ⭐ Main Image
############################################ ############################################
FROM $BASE_IMAGE AS release FROM $BASE_IMAGE AS release
USER node
WORKDIR /app WORKDIR /app
LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma" LABEL org.opencontainers.image.source="https://github.com/louislam/uptime-kuma"
@ -45,7 +46,6 @@ CMD ["node", "server/server.js"]
# Rootless Image # Rootless Image
############################################ ############################################
FROM release AS rootless FROM release AS rootless
USER node
############################################ ############################################
# Mark as Nightly # Mark as Nightly
@ -79,10 +79,6 @@ USER node
RUN git config --global user.email "no-reply@no-reply.com" RUN git config --global user.email "no-reply@no-reply.com"
RUN git config --global user.name "PR Tester" RUN git config --global user.name "PR Tester"
RUN git clone https://github.com/louislam/uptime-kuma.git . RUN git clone https://github.com/louislam/uptime-kuma.git .
# Hide the warning when running in detached head state
RUN git config --global advice.detachedHead false
RUN npm ci RUN npm ci
EXPOSE 3000 3001 EXPOSE 3000 3001

View File

@ -1,8 +1,6 @@
module.exports = { module.exports = {
apps: [ apps: [{
{ name: "uptime-kuma",
name: "uptime-kuma", script: "./server/server.js",
script: "./server/server.js", }]
},
],
}; };

View File

@ -1,6 +1,3 @@
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const pkg = require("../../package.json"); const pkg = require("../../package.json");
const fs = require("fs"); const fs = require("fs");
const childProcess = require("child_process"); const childProcess = require("child_process");
@ -8,7 +5,7 @@ const util = require("../../src/util");
util.polyfill(); util.polyfill();
const version = process.env.RELEASE_BETA_VERSION; const version = process.env.VERSION;
console.log("Beta Version: " + version); console.log("Beta Version: " + version);
@ -19,26 +16,18 @@ if (!version || !version.includes("-beta.")) {
const exists = tagExists(version); const exists = tagExists(version);
if (!exists) { if (! exists) {
// Process package.json // Process package.json
pkg.version = version; pkg.version = version;
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n"); fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json // Also update package-lock.json
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm"; const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const resultVersion = childProcess.spawnSync(npm, ["--no-git-tag-version", "version", version], { shell: true }); childProcess.spawnSync(npm, [ "install" ]);
if (resultVersion.error) {
console.error(resultVersion.error);
console.error("error npm version!");
process.exit(1);
}
const resultInstall = childProcess.spawnSync(npm, ["install"], { shell: true });
if (resultInstall.error) {
console.error(resultInstall.error);
console.error("error update package-lock!");
process.exit(1);
}
commit(version); commit(version);
tag(version);
} else { } else {
console.log("version tag exists, please delete the tag or use another tag"); console.log("version tag exists, please delete the tag or use another tag");
process.exit(1); process.exit(1);
@ -53,7 +42,7 @@ if (!exists) {
function commit(version) { function commit(version) {
let msg = "Update to " + version; let msg = "Update to " + version;
let res = childProcess.spawnSync("git", ["commit", "-m", msg, "-a"]); let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim(); let stdout = res.stdout.toString().trim();
console.log(stdout); console.log(stdout);
@ -61,13 +50,21 @@ function commit(version) {
throw new Error("commit error"); throw new Error("commit error");
} }
// Get the current branch name res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
res = childProcess.spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"]); console.log(res.stdout.toString().trim());
let branchName = res.stdout.toString().trim(); }
console.log("Current branch:", branchName);
// Git push the branch /**
childProcess.spawnSync("git", ["push", "origin", branchName, "--force"], { stdio: "inherit" }); * Create a tag with the specified version
* @param {string} version Tag to create
* @returns {void}
*/
function tag(version) {
let res = childProcess.spawnSync("git", [ "tag", version ]);
console.log(res.stdout.toString().trim());
res = childProcess.spawnSync("git", [ "push", "origin", version ]);
console.log(res.stdout.toString().trim());
} }
/** /**
@ -77,11 +74,11 @@ function commit(version) {
* @throws Version is not valid * @throws Version is not valid
*/ */
function tagExists(version) { function tagExists(version) {
if (!version) { if (! version) {
throw new Error("invalid version"); throw new Error("invalid version");
} }
let res = childProcess.spawnSync("git", ["tag", "-l", version]); let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version; return res.stdout.toString().trim() === version;
} }

View File

@ -14,9 +14,7 @@ if (platform === "linux/arm/v7") {
console.log("Already built in the host, skip."); console.log("Already built in the host, skip.");
process.exit(0); process.exit(0);
} else { } else {
console.log( console.log("prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build.");
"prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build."
);
} }
} else { } else {
if (fs.existsSync("./extra/healthcheck-armv7")) { if (fs.existsSync("./extra/healthcheck-armv7")) {
@ -26,3 +24,4 @@ if (platform === "linux/arm/v7") {
const output = childProcess.execSync("go build -x -o ./extra/healthcheck ./extra/healthcheck.go").toString("utf8"); const output = childProcess.execSync("go build -x -o ./extra/healthcheck ./extra/healthcheck.go").toString("utf8");
console.log(output); console.log(output);

View File

@ -1,72 +0,0 @@
import fs from "fs";
const dir = "./db/knex_migrations";
// Get the file list (ending with .js) from the directory
const files = fs.readdirSync(dir).filter((file) => file !== "README.md");
// They are wrong, but they had been merged, so allowed.
const exceptionList = [
"2024-08-24-000-add-cache-bust.js",
"2024-10-1315-rabbitmq-monitor.js",
];
// Correct format: YYYY-MM-DD-HHmm-description.js
for (const file of files) {
if (exceptionList.includes(file)) {
continue;
}
// Check ending with .js
if (!file.endsWith(".js")) {
console.error(`It should end with .js: ${file}`);
process.exit(1);
}
const parts = file.split("-");
// Should be at least 5 parts
if (parts.length < 5) {
console.error(`Invalid format: ${file}`);
process.exit(1);
}
// First part should be a year >= 2024
const year = parseInt(parts[0], 10);
if (isNaN(year) || year < 2023) {
console.error(`Invalid year: ${file}`);
process.exit(1);
}
// Second part should be a month
const month = parseInt(parts[1], 10);
if (isNaN(month) || month < 1 || month > 12) {
console.error(`Invalid month: ${file}`);
process.exit(1);
}
// Third part should be a day
const day = parseInt(parts[2], 10);
if (isNaN(day) || day < 1 || day > 31) {
console.error(`Invalid day: ${file}`);
process.exit(1);
}
// Fourth part should be HHmm
const time = parts[3];
// Check length is 4
if (time.length !== 4) {
console.error(`Invalid time: ${file}`);
process.exit(1);
}
const hour = parseInt(time.substring(0, 2), 10);
const minute = parseInt(time.substring(2), 10);
if (isNaN(hour) || hour < 0 || hour > 23 || isNaN(minute) || minute < 0 || minute > 59) {
console.error(`Invalid time: ${file}`);
process.exit(1);
}
}
console.log("All knex filenames are correct.");

View File

@ -1,27 +0,0 @@
// For #5231
const fs = require("fs");

// Directory containing one JSON translation file per language
const langDir = "./src/lang";

// Every *.json file must be serialized with 4-space indentation and a single
// trailing newline; anything else fails the check.
const entries = fs.readdirSync(langDir);

for (const entry of entries) {
    // Only JSON files are checked
    if (!entry.endsWith(".json")) {
        continue;
    }

    const filePath = `${langDir}/${entry}`;
    const rawContent = fs.readFileSync(filePath, "utf8");

    // Round-trip through JSON and compare byte-for-byte with the file on disk
    const expectedContent = JSON.stringify(JSON.parse(rawContent), null, 4) + "\n";

    if (rawContent !== expectedContent) {
        console.error(`File ${entry} is not formatted correctly.`);
        process.exit(1);
    }
}

console.log("All lang json files are formatted correctly.");

33
extra/checkout-pr.js Normal file
View File

@ -0,0 +1,33 @@
const childProcess = require("child_process");

// Expects UPTIME_KUMA_GH_REPO in the form "<github-user>:<branch>"
// (e.g. mhkarimi1383:goalert-notification).
if (!process.env.UPTIME_KUMA_GH_REPO) {
    console.error("Please set a repo to the environment variable 'UPTIME_KUMA_GH_REPO' (e.g. mhkarimi1383:goalert-notification)");
    process.exit(1);
}

let inputArray = process.env.UPTIME_KUMA_GH_REPO.split(":");

if (inputArray.length !== 2) {
    console.error("Invalid format. Please set a repo to the environment variable 'UPTIME_KUMA_GH_REPO' (e.g. mhkarimi1383:goalert-notification)");
    // Bug fix: previously the script fell through here and ran the git
    // commands below with an undefined branch name.
    process.exit(1);
}

let name = inputArray[0];
let branch = inputArray[1];

console.log("Checkout pr");

/**
 * Run a git command synchronously and echo its stdout/stderr.
 * @param {string[]} args - Arguments passed to git
 * @returns {void}
 */
function git(args) {
    const result = childProcess.spawnSync("git", args);
    console.log(result.stdout.toString());
    console.error(result.stderr.toString());
}

// Checkout the pr: register the fork as a remote, fetch the branch,
// then force-checkout its head.
git([ "remote", "add", name, `https://github.com/${name}/uptime-kuma` ]);
git([ "fetch", name, branch ]);
git([ "checkout", `${name}/${branch}`, "--force" ]);

Some files were not shown because too many files have changed in this diff Show More