Compare commits

..

10 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
d4d5216666 Merge branch 'master' into copilot/fix-migration-error-on-text-column 2026-01-06 20:27:50 +00:00
copilot-swe-agent[bot]
7b22d562bc Merge branch 'master' into copilot/fix-migration-error-on-text-column 2026-01-06 19:42:29 +00:00
copilot-swe-agent[bot]
063287aa18 Merge branch 'master' into copilot/fix-migration-error-on-text-column 2026-01-06 19:34:44 +00:00
copilot-swe-agent[bot]
873dd5c95f Restore migration changes (undo previous revert)
Co-authored-by: CommanderStorm <26258709+CommanderStorm@users.noreply.github.com>
2026-01-06 19:31:00 +00:00
copilot-swe-agent[bot]
b49c5aa463 Revert migration changes as domain expiry monitor is being removed
Co-authored-by: CommanderStorm <26258709+CommanderStorm@users.noreply.github.com>
2026-01-06 19:09:19 +00:00
copilot-swe-agent[bot]
4e28b99799 Use @testcontainers/mysql instead of GenericContainer for MySQL test
Co-authored-by: CommanderStorm <26258709+CommanderStorm@users.noreply.github.com>
2026-01-06 18:01:45 +00:00
copilot-swe-agent[bot]
d1465199d8 Simplify fix migration as requested in code review
Co-authored-by: CommanderStorm <26258709+CommanderStorm@users.noreply.github.com>
2026-01-06 17:53:06 +00:00
copilot-swe-agent[bot]
c6696d6a77 Address code review feedback
Co-authored-by: CommanderStorm <26258709+CommanderStorm@users.noreply.github.com>
2026-01-06 17:39:20 +00:00
copilot-swe-agent[bot]
17885b5438 Fix domain column from TEXT to VARCHAR(255) for MySQL compatibility
Co-authored-by: CommanderStorm <26258709+CommanderStorm@users.noreply.github.com>
2026-01-06 17:33:09 +00:00
copilot-swe-agent[bot]
1f5d184b11 Initial plan 2026-01-06 17:21:36 +00:00
459 changed files with 9139 additions and 18647 deletions

View File

@ -1,5 +1,9 @@
module.exports = {
ignorePatterns: ["test/*.js", "server/modules/*", "src/util.js"],
ignorePatterns: [
"test/*.js",
"server/modules/*",
"src/util.js"
],
root: true,
env: {
browser: true,
@ -11,7 +15,6 @@ module.exports = {
"eslint:recommended",
"plugin:vue/vue3-recommended",
"plugin:jsdoc/recommended-error",
"prettier", // Disables ESLint formatting rules that conflict with Prettier
],
parser: "vue-eslint-parser",
parserOptions: {
@ -19,93 +22,148 @@ module.exports = {
sourceType: "module",
requireConfigFile: false,
},
plugins: ["jsdoc", "@typescript-eslint"],
plugins: [
"jsdoc",
"@typescript-eslint",
],
rules: {
yoda: "error",
eqeqeq: ["warn", "smart"],
camelcase: [
"warn",
"yoda": "error",
eqeqeq: [ "warn", "smart" ],
"linebreak-style": [ "error", "unix" ],
"camelcase": [ "warn", {
"properties": "never",
"ignoreImports": true
}],
"no-unused-vars": [ "warn", {
"args": "none"
}],
indent: [
"error",
4,
{
properties: "never",
ignoreImports: true,
},
],
"no-unused-vars": [
"warn",
{
args: "none",
ignoredNodes: [ "TemplateLiteral" ],
SwitchCase: 1,
},
],
quotes: [ "error", "double" ],
semi: "error",
"vue/html-indent": [ "error", 4 ], // default: 2
"vue/max-attributes-per-line": "off",
"vue/singleline-html-element-content-newline": "off",
"vue/html-self-closing": "off",
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"vue/require-component-is": "off", // not allow is="style" https://github.com/vuejs/eslint-plugin-vue/issues/462#issuecomment-430234675
"vue/attribute-hyphenation": "off", // This change noNL to "no-n-l" unexpectedly
"vue/multi-word-component-names": "off",
curly: "error",
"no-multi-spaces": [ "error", {
ignoreEOLComments: true,
}],
"array-bracket-spacing": [ "warn", "always", {
"singleValue": true,
"objectsInArrays": false,
"arraysInArrays": false
}],
"space-before-function-paren": [ "error", {
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}],
"curly": "error",
"object-curly-spacing": [ "error", "always" ],
"object-curly-newline": "off",
"object-property-newline": "error",
"comma-spacing": "error",
"brace-style": "error",
"no-var": "error",
"key-spacing": "warn",
"keyword-spacing": "warn",
"space-infix-ops": "error",
"arrow-spacing": "warn",
"no-throw-literal": "error",
"no-constant-condition": [
"error",
{
checkLoops: false,
},
],
"no-trailing-spaces": "error",
"no-constant-condition": [ "error", {
"checkLoops": false,
}],
"space-before-blocks": "warn",
//"no-console": "warn",
"no-extra-boolean-cast": "off",
"no-multiple-empty-lines": [ "warn", {
"max": 1,
"maxBOF": 0,
}],
"lines-between-class-members": [ "warn", "always", {
exceptAfterSingleLine: true,
}],
"no-unneeded-ternary": "error",
"array-bracket-newline": [ "error", "consistent" ],
"eol-last": [ "error", "always" ],
//"prefer-template": "error",
"no-empty": [
"error",
{
allowEmptyCatch: true,
},
],
"template-curly-spacing": [ "warn", "never" ],
"comma-dangle": [ "warn", "only-multiline" ],
"no-empty": [ "error", {
"allowEmptyCatch": true
}],
"no-control-regex": "off",
"one-var": ["error", "never"],
"max-statements-per-line": ["error", { max: 1 }],
"one-var": [ "error", "never" ],
"max-statements-per-line": [ "error", { "max": 1 }],
"jsdoc/check-tag-names": [
"error",
{
definedTags: ["link"],
},
"definedTags": [ "link" ]
}
],
"jsdoc/no-undefined-types": "off",
"jsdoc/no-defaults": ["error", { noOptionalParamNames: true }],
"jsdoc/no-defaults": [
"error",
{ "noOptionalParamNames": true }
],
"jsdoc/require-throws": "warn",
"jsdoc/require-jsdoc": [
"error",
{
require: {
FunctionDeclaration: true,
MethodDefinition: true,
},
},
"require": {
"FunctionDeclaration": true,
"MethodDefinition": true,
}
}
],
"jsdoc/no-blank-block-descriptions": "error",
"jsdoc/require-returns-description": "warn",
"jsdoc/require-returns-check": ["error", { reportMissingReturnForUndefinedTypes: false }],
"jsdoc/require-returns-check": [
"error",
{ "reportMissingReturnForUndefinedTypes": false }
],
"jsdoc/require-returns": [
"warn",
{
forceRequireReturn: true,
forceReturnsWithAsync: true,
},
"forceRequireReturn": true,
"forceReturnsWithAsync": true
}
],
"jsdoc/require-param-type": "warn",
"jsdoc/require-param-description": "warn",
"jsdoc/require-param-description": "warn"
},
overrides: [
"overrides": [
{
"files": [ "src/languages/*.js", "src/icon.js" ],
"rules": {
"comma-dangle": [ "error", "always-multiline" ],
}
},
// Override for TypeScript
{
files: ["**/*.ts"],
extends: ["plugin:@typescript-eslint/recommended"],
rules: {
"files": [
"**/*.ts",
],
extends: [
"plugin:@typescript-eslint/recommended",
],
"rules": {
"jsdoc/require-returns-type": "off",
"jsdoc/require-param-type": "off",
"@typescript-eslint/no-explicit-any": "off",
"prefer-const": "off",
},
},
],
}
}
]
};

View File

@ -12,10 +12,10 @@ body:
## ❗ IMPORTANT: DO NOT SHARE VULNERABILITY DETAILS HERE
## Please do not open issues for upstream dependency scan results.
Automated security tools often report false-positive issues that are not exploitable in the context of Uptime Kuma.
Reviewing these without concrete impact does not scale for us.
If you can demonstrate that an upstream issue is actually exploitable in Uptime Kuma (e.g. with a PoC or reproducible steps), were happy to take a look.
### ⚠️ Report a Security Vulnerability

View File

@ -1,31 +1,36 @@
# Summary
<sub> To keep reviews fast and effective, please make sure youve [read our pull request guidelines](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma)</sub>
In this pull request, the following changes are made:
## 📝 Summary of changes done and why they are done
- Foobar was changed to FooFoo, because ...
<!-- Provide a clear summary of the purpose and scope of this pull request-->
## 📋 Related issues
<!--Please link any GitHub issues or tasks that this pull request addresses-->
- Relates to #issue-number <!--this links related the issue-->
- Resolves #issue-number <!--this auto-closes the issue-->
## 📄 Checklist
<details>
<summary>Please follow this checklist to avoid unnecessary back and forth (click to expand)</summary>
- [ ] ⚠️ If there are Breaking change (a fix or feature that alters existing functionality in a way that could cause issues) I have called them out
- [ ] 🧠 I have disclosed any use of LLMs/AI in this contribution and reviewed all generated content.
I understand that I am responsible for and able to explain every line of code I submit.
- [ ] 🔍 Any UI changes adhere to visual style of this project.
- [ ] 🛠️ I have self-reviewed and self-tested my code to ensure it works as expected.
- [ ] 🔍 My code adheres to the style guidelines of this project.
- [ ] ⚠️ My changes generate no new warnings.
- [ ] 🛠️ I have reviewed and tested my code.
- [ ] 📝 I have commented my code, especially in hard-to-understand areas (e.g., using JSDoc for methods).
- [ ] 🤖 I added or updated automated tests where appropriate.
- [ ] 📄 Documentation updates are included (if applicable).
- [ ] 🔒 I have considered potential security impacts and mitigated risks.
- [ ] 🧰 Dependency updates are listed and explained.
- [ ] ⚠️ CI passes and is green.
</details>
## Screenshots for Visual Changes
## 📷 Screenshots or Visual Changes
<!--
If this pull request introduces visual changes, please provide the following details.

View File

@ -90,9 +90,9 @@ correct authorization and authentication mechanisms are in place.
### Security Best Practices
- Ensure that the code is free from common vulnerabilities like **SQL
injection**, **XSS attacks**, and **insecure API calls**.
injection**, **XSS attacks**, and **insecure API calls**.
- Check for proper encryption of sensitive data, and ensure that **passwords**
or **API tokens** are not hardcoded in the code.
or **API tokens** are not hardcoded in the code.
## Performance
@ -105,7 +105,7 @@ like load times, memory usage, or other performance aspects.
- Have the right libraries been chosen?
- Are there unnecessary dependencies that might reduce performance or increase
code complexity?
code complexity?
- Are these dependencies actively maintained and free of known vulnerabilities?
### Performance Best Practices
@ -113,7 +113,7 @@ like load times, memory usage, or other performance aspects.
- **Measure performance** using tools like Lighthouse or profiling libraries.
- **Avoid unnecessary dependencies** that may bloat the codebase.
- Ensure that the **code does not degrade the user experience** (e.g., by
increasing load times or memory consumption).
increasing load times or memory consumption).
## Compliance and Integration
@ -187,9 +187,9 @@ the PR can be approved. Some examples of **significant issues** include:
- Missing tests for new functionality.
- Identified **security vulnerabilities**.
- Code changes that break **backward compatibility** without a proper migration
plan.
plan.
- Code that causes **major performance regressions** (e.g., high CPU/memory
usage).
usage).
## After the Review

View File

@ -18,26 +18,22 @@
## Build & Validation Commands
### Prerequisites
- Node.js >= 20.4.0, npm >= 9.3, Git
### Essential Command Sequence
1. **Install Dependencies**:
```bash
npm ci # Use npm ci NOT npm install (~60-90 seconds)
```
2. **Linting** (required before committing):
```bash
npm run lint # Both linters (~15-30 seconds)
npm run lint:prod # For production (zero warnings)
```
3. **Build Frontend**:
```bash
npm run build # Takes ~90-120 seconds, builds to dist/
```
@ -109,7 +105,6 @@ npm run dev # Starts frontend (port 3000) and backend (port 3001)
## CI/CD Workflows
**auto-test.yml** (runs on PR/push to master/1.23.X):
- Linting, building, backend tests on multiple OS/Node versions (15 min timeout)
- E2E Playwright tests
@ -134,7 +129,7 @@ npm run dev # Starts frontend (port 3000) and backend (port 3001)
## Database
- Primary: SQLite (also supports MariaDB/MySQL)
- Primary: SQLite (also supports MariaDB/MySQL/PostgreSQL)
- Migrations in `db/knex_migrations/` using Knex.js
- Filename format validated by CI: `node ./extra/check-knex-filenames.mjs`
@ -147,9 +142,7 @@ npm run dev # Starts frontend (port 3000) and backend (port 3001)
## Adding New Features
### New Notification Provider
Files to modify:
1. `server/notification-providers/PROVIDER_NAME.js` (backend logic)
2. `server/notification.js` (register provider)
3. `src/components/notifications/PROVIDER_NAME.vue` (frontend UI)
@ -158,9 +151,7 @@ Files to modify:
6. `src/lang/en.json` (add translation keys)
### New Monitor Type
Files to modify:
1. `server/monitor-types/MONITORING_TYPE.js` (backend logic)
2. `server/uptime-kuma-server.js` (register monitor type)
3. `src/pages/EditMonitor.vue` (frontend UI)

View File

@ -1,18 +1,18 @@
name: Auto Test
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-server
cancel-in-progress: true
name: Auto Test
on:
push:
branches: [master, 1.23.X, 3.0.0]
branches: [ master, 1.23.X, 3.0.0 ]
pull_request:
permissions: {}
jobs:
auto-test:
runs-on: ${{ matrix.os }}
timeout-minutes: 15
permissions:
contents: read
@ -21,39 +21,39 @@ jobs:
matrix:
os: [macos-latest, ubuntu-22.04, windows-latest, ubuntu-22.04-arm]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
node: [20, 24]
node: [ 20, 24 ]
# Also test non-LTS, but only on Ubuntu.
include:
- os: ubuntu-22.04
node: 25
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node }}
- run: npm clean-install --no-fund
- name: Rebuild native modules for ARM64
if: matrix.os == 'ubuntu-22.04-arm'
run: npm rebuild @louislam/sqlite3
- run: npm run build
- run: npm run test-backend
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node }}
- run: npm clean-install --no-fund
- name: Rebuild native modules for ARM64
if: matrix.os == 'ubuntu-22.04-arm'
run: npm rebuild @louislam/sqlite3
- run: npm run build
- run: npm run test-backend
env:
HEADLESS_TEST: 1
JUST_FOR_TEST: ${{ secrets.JUST_FOR_TEST }}
# As a lot of dev dependencies are not supported on ARMv7, we have to test it separately and just test if `npm ci --production` works
armv7-simple-test:
@ -63,7 +63,7 @@ jobs:
strategy:
fail-fast: false
matrix:
node: [20, 22]
node: [ 20, 22 ]
# See supported Node.js release schedule at https://nodejs.org/en/about/releases/
steps:
@ -92,23 +92,23 @@ jobs:
contents: read
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
- name: Use Node.js 20
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- run: npm clean-install --no-fund
- run: npm run lint:prod
- name: Use Node.js 20
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- run: npm clean-install --no-fund
- run: npm run lint:prod
e2e-test:
runs-on: ubuntu-22.04-arm
@ -117,28 +117,28 @@ jobs:
env:
PLAYWRIGHT_VERSION: ~1.39.0
steps:
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- run: git config --global core.autocrlf false # Mainly for Windows
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
- name: Setup Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
- run: npm clean-install --no-fund
- name: Setup Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 22
- run: npm clean-install --no-fund
- name: Rebuild native modules for ARM64
run: npm rebuild @louislam/sqlite3
- name: Rebuild native modules for ARM64
run: npm rebuild @louislam/sqlite3
- name: Install Playwright ${{ env.PLAYWRIGHT_VERSION }}
run: npx playwright@${{ env.PLAYWRIGHT_VERSION }} install
- name: Install Playwright ${{ env.PLAYWRIGHT_VERSION }}
run: npx playwright@${{ env.PLAYWRIGHT_VERSION }} install
- run: npm run build
- run: npm run test-e2e
- run: npm run build
- run: npm run test-e2e

View File

@ -2,7 +2,7 @@ name: autofix.ci
on:
push:
branches: ["master", "1.23.X"]
branches: [ "master", "1.23.X"]
pull_request:
permissions: {}
@ -14,7 +14,7 @@ jobs:
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
@ -30,10 +30,6 @@ jobs:
- name: Install dependencies
run: npm ci
- name: Update RDAP DNS data from IANA
run: wget -O server/model/rdap-dns.json https://data.iana.org/rdap/dns.json
continue-on-error: true
- name: Auto-fix JavaScript/Vue linting issues
run: npm run lint-fix:js
continue-on-error: true
@ -41,13 +37,5 @@ jobs:
- name: Auto-fix CSS/SCSS linting issues
run: npm run lint-fix:style
continue-on-error: true
- name: Auto-format code with Prettier
run: npm run fmt
continue-on-error: true
- name: Compile TypeScript
run: npm run tsc
continue-on-error: true
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

View File

@ -1,93 +0,0 @@
name: Beta Release
on:
workflow_dispatch:
inputs:
version:
description: "Beta version number (e.g., 2.1.0-beta.2)"
required: true
type: string
previous_version:
description: "Previous version tag for changelog (e.g., 2.1.0-beta.1)"
required: true
type: string
dry_run:
description: "Dry Run (The docker image will not be pushed to registries. PR will still be created.)"
required: false
type: boolean
default: false
permissions:
contents: write
pull-requests: write
jobs:
beta-release:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: master
persist-credentials: true
fetch-depth: 0 # Fetch all history for changelog generation
- name: Set up Node.js
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 24
- name: Create release branch
env:
VERSION: ${{ inputs.version }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@github.com/${{ github.repository }}.git"
# Delete remote branch if it exists
git push origin --delete "release-${VERSION}" || true
# Delete local branch if it exists
git branch -D "release-${VERSION}" || true
# For testing purpose
# git checkout beta-workflow
git checkout -b "release-${VERSION}"
- name: Install dependencies
run: npm clean-install --no-fund
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Run release-beta
env:
RELEASE_BETA_VERSION: ${{ inputs.version }}
RELEASE_PREVIOUS_VERSION: ${{ inputs.previous_version }}
DRY_RUN: ${{ inputs.dry_run }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: npm run release-beta
- name: Upload dist.tar.gz as artifact
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
with:
name: dist-${{ inputs.version }}
path: ./tmp/dist.tar.gz
retention-days: 90

View File

@ -1,48 +0,0 @@
name: Build Docker Base Images
on:
workflow_dispatch: # Allow manual trigger
permissions: {}
jobs:
build-docker-base:
runs-on: ubuntu-latest
timeout-minutes: 120
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Use Node.js 20
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- name: Build and push base2-slim image
run: npm run build-docker-base-slim
- name: Build and push base2 image
run: npm run build-docker-base

View File

@ -17,15 +17,15 @@ jobs:
node-version: [20]
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node-version }}
- run: npm ci
- name: Close incorrect issue
run: node extra/close-incorrect-issue.js ${{ secrets.GITHUB_TOKEN }} ${{ github.event.issue.number }} "$ISSUE_USER_LOGIN"
env:
ISSUE_USER_LOGIN: ${{ github.event.issue.user.login }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: ${{ matrix.node-version }}
- run: npm ci
- name: Close incorrect issue
run: node extra/close-incorrect-issue.js ${{ secrets.GITHUB_TOKEN }} ${{ github.event.issue.number }} "$ISSUE_USER_LOGIN"
env:
ISSUE_USER_LOGIN: ${{ github.event.issue.user.login }}

View File

@ -2,11 +2,11 @@ name: "CodeQL"
on:
push:
branches: ["master", "1.23.X"]
branches: [ "master", "1.23.X"]
pull_request:
branches: ["master", "1.23.X"]
branches: [ "master", "1.23.X"]
schedule:
- cron: "16 22 * * 0"
- cron: '16 22 * * 0'
jobs:
analyze:
@ -22,34 +22,34 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["go", "javascript-typescript"]
language: [ 'go', 'javascript-typescript' ]
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
- name: Autobuild
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
- name: Autobuild
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: "/language:${{matrix.language}}"
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: "/language:${{matrix.language}}"
zizmor:
runs-on: ubuntu-latest
permissions:
security-events: write
contents: read
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Run zizmor
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
runs-on: ubuntu-latest
permissions:
security-events: write
contents: read
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Run zizmor
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0

View File

@ -26,5 +26,5 @@ jobs:
- name: Apply label
uses: eps1lon/actions-label-merge-conflict@1df065ebe6e3310545d4f4c4e862e43bdca146f0 # v3.0.3
with:
dirtyLabel: "needs:resolve-merge-conflict"
repoToken: "${{ secrets.GITHUB_TOKEN }}"
dirtyLabel: 'needs:resolve-merge-conflict'
repoToken: '${{ secrets.GITHUB_TOKEN }}'

View File

@ -1,65 +0,0 @@
name: Mark PR as draft when changes are requested
# pull_request_target is safe here because:
# 1. Does not use any external actions; only uses the GitHub CLI via run commands
# 2. Has minimal permissions
# 3. Doesn't checkout or execute any untrusted code from PRs
# 4. Only adds/removes labels or changes the draft status
on: # zizmor: ignore[dangerous-triggers]
pull_request_target:
types:
- review_submitted
- labeled
- ready_for_review
permissions: {}
jobs:
mark-draft:
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: |
(
github.event.action == 'review_submitted' &&
github.event.review.state == 'changes_requested'
) || (
github.event.action == 'labeled' &&
github.event.label.name == 'pr:please address review comments'
)
steps:
- name: Add label on requested changes
if: github.event.review.state == 'changes_requested'
env:
GH_TOKEN: ${{ github.token }}
run: |
gh issue edit "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--add-label "pr:please address review comments"
- name: Mark PR as draft
env:
GH_TOKEN: ${{ github.token }}
run: |
gh pr ready "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--undo || true
# || true to ignore the case where the pr is already a draft
ready-for-review:
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: github.event.action == 'ready_for_review'
steps:
- name: Update labels for review
env:
GH_TOKEN: ${{ github.token }}
run: |
gh issue edit "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--remove-label "pr:please address review comments" || true
gh issue edit "${{ github.event.pull_request.number }}" \
--repo "${{ github.repository }}" \
--add-label "pr:needs review"

View File

@ -1,40 +0,0 @@
name: New contributor message
on:
# Safety
# This workflow uses pull_request_target so it can run with write permissions on first-time contributor PRs.
# It is safe because it does not check out or execute any code from the pull request and
# only uses the pinned, trusted plbstl/first-contribution action
pull_request_target: # zizmor: ignore[dangerous-triggers]
types: [opened, closed]
branches:
- master
permissions:
pull-requests: write
jobs:
build:
if: github.repository == 'louislam/uptime-kuma'
name: Hello new contributor
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: plbstl/first-contribution@4b2b042fffa26792504a18e49aa9543a87bec077 # v4.1.0
with:
pr-reactions: rocket
pr-opened-msg: >
Hello and thanks for lending a paw to Uptime Kuma! 🐻👋
As this is your first contribution, please be sure to check out our [Pull Request guidelines](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma).
In particular:
- Mark your PR as Draft while youre still making changes
- Mark it as Ready for review once its fully ready
If you have any design or process questions, feel free to ask them right here in this pull request - unclear documentation is a bug too.
pr-merged-msg: >
@{fc-author} congrats on your first contribution to Uptime Kuma! 🐻
We hope you enjoy contributing to our project and look forward to seeing more of your work in the future!
If you want to see your contribution in action, please see our [nightly builds here](https://hub.docker.com/layers/louislam/uptime-kuma/nightly2).

View File

@ -1,58 +0,0 @@
name: Nightly Release
on:
schedule:
# Runs at 2:00 AM UTC every day
- cron: "0 2 * * *"
workflow_dispatch: # Allow manual trigger
permissions: {}
jobs:
release-nightly:
runs-on: ubuntu-latest
timeout-minutes: 120
permissions:
contents: read
packages: write
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with: { persist-credentials: false }
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Use Node.js 20
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version: 20
- name: Cache/Restore node_modules
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
id: node-modules-cache
with:
path: node_modules
key: node-modules-${{ runner.os }}-node20-${{ hashFiles('**/package-lock.json') }}
- name: Install dependencies
run: npm clean-install --no-fund
- name: Run release-nightly
run: npm run release-nightly

View File

@ -1,31 +0,0 @@
name: "PR Metadata"
# if someone opens a PR, edits it, or reopens it we want to validate the title
# This is separate from the rest of the CI as the title may change without code changes
on:
# SECURITY: pull_request_target is used here to allow validation of PRs from forks.
# This is safe because:
# 1. No code from the PR is checked out
# 2. Permissions are restricted to pull-requests: read
# 3. Only a trusted third-party action is used to validate the PR title
# 4. No user-controlled code is executed
pull_request_target: # zizmor: ignore[dangerous-triggers]
types:
- opened
- edited
- reopened
- synchronize
permissions:
pull-requests: read
jobs:
pr-title:
name: Validate PR title follows https://conventionalcommits.org
runs-on: ubuntu-latest
permissions:
pull-requests: read
steps:
- uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6.1.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -17,3 +17,4 @@ jobs:
# Regex, /src/lang/*.json is not allowed to be changed, except for /src/lang/en.json
pattern: '^(?!src/lang/en\.json$)src/lang/.*\.json$'
trustedAuthors: UptimeKumaBot

View File

@ -1,8 +1,8 @@
name: "Automatically close stale issues"
name: 'Automatically close stale issues'
on:
workflow_dispatch:
schedule:
- cron: "0 */6 * * *"
- cron: '0 */6 * * *'
#Run every 6 hours
permissions: {}
@ -22,8 +22,8 @@ jobs:
days-before-close: 7
days-before-pr-stale: -1
days-before-pr-close: -1
exempt-issue-labels: "News,discussion,bug,doc,feature-request"
exempt-issue-assignees: "louislam"
exempt-issue-labels: 'News,discussion,bug,doc,feature-request'
exempt-issue-assignees: 'louislam'
operations-per-run: 200
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
@ -31,7 +31,7 @@ jobs:
This issue was marked as `cannot-reproduce` by a maintainer.
If an issue is non-reproducible, we cannot fix it, as we do not know what the underlying issue is.
If you have any ideas how we can reproduce this issue, we would love to hear them.
We don't have a good way to deal with truly unreproducible issues and are going to close this issue in a month.
If you think there might be other differences in our environment or in how we tried to reproduce this, we would appreciate any ideas.
close-issue-message: |-
@ -41,5 +41,6 @@ jobs:
days-before-close: 30
days-before-pr-stale: -1
days-before-pr-close: -1
any-of-issue-labels: "cannot-reproduce"
any-of-issue-labels: 'cannot-reproduce'
operations-per-run: 200

View File

@ -1,2 +0,0 @@
# language files
src/lang/*.json

View File

@ -1,65 +0,0 @@
/**
* Prettier Configuration for Uptime Kuma
*
* Usage:
* npm run fmt - Format all files (auto-runs in CI via autofix workflow)
* npm run fmt -- --check - Check formatting without making changes
*
* TIP: This formatter is automatically run in CI, so no need to worry about it
*/
module.exports = {
// Core formatting options - matching original ESLint rules
semi: true,
singleQuote: false,
trailingComma: "es5",
printWidth: 120,
tabWidth: 4,
useTabs: false,
endOfLine: "lf",
arrowParens: "always",
bracketSpacing: true,
bracketSameLine: false,
// Vue-specific settings
vueIndentScriptAndStyle: false,
singleAttributePerLine: false,
htmlWhitespaceSensitivity: "ignore", // More forgiving with whitespace in HTML
// Override settings for specific file types
overrides: [
{
files: "*.vue",
options: {
parser: "vue",
},
},
{
files: ["*.json"],
options: {
tabWidth: 4,
trailingComma: "none",
},
},
{
files: ["*.yml", "*.yaml"],
options: {
tabWidth: 2,
trailingComma: "none",
},
},
{
files: ["src/icon.js"],
options: {
trailingComma: "all",
},
},
{
files: ["*.md"],
options: {
printWidth: 100,
proseWrap: "preserve",
tabWidth: 2,
},
},
],
};

View File

@ -1,11 +1,10 @@
{
"extends": [
"stylelint-config-standard",
"stylelint-config-prettier"
],
"extends": "stylelint-config-standard",
"customSyntax": "postcss-html",
"rules": {
"indentation": 4,
"no-descending-specificity": null,
"selector-list-comma-newline-after": null,
"declaration-empty-line-before": null,
"alpha-value-notation": "number",
"color-function-notation": "legacy",

View File

@ -54,7 +54,8 @@ to review the appropriate one for your contribution.
[**PLEASE SEE OUR SECURITY POLICY.**](SECURITY.md)
[advisory]: https://github.com/louislam/uptime-kuma/security/advisories/new
[issue]: https://github.com/louislam/uptime-kuma/issues/new?template=security_issue.yml
[issue]:
https://github.com/louislam/uptime-kuma/issues/new?template=security_issue.yml
</p>
</details>
@ -64,6 +65,7 @@ to review the appropriate one for your contribution.
If you come across a bug and think you can solve, we appreciate your work.
Please make sure that you follow these rules:
- keep the PR as small as possible, fix only one thing at a time => keeping it
reviewable
- test that your code does what you claim it does.
@ -77,15 +79,16 @@ to review the appropriate one for your contribution.
- <details><summary><b>Translations / Internationalisation (i18n)</b> (click to expand)</summary>
<p>
Please add **all** strings that are translatable to `src/lang/en.json`. If translation keys are omitted, they cannot be translated. **Do not include any other languages in your initial pull request** (even if it is your mother tongue) to avoid merge conflicts between Weblate and `master`. Once your PR is merged into `master`, the strings can be translated by awesome people donating their language skills.
We use Weblate to localise this project into many languages. If you want to help translate Uptime Kuma into your language, please see [these instructions on how to translate using Weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
Please add **all** strings that are translatable to `src/lang/en.json`. If translation keys are omitted, they cannot be translated. **Do not include any other languages in your initial pull request** (even if it is your mother tongue) to avoid merge conflicts between Weblate and `master`. Once your PR is merged into `master`, the strings can be translated by awesome people donating their language skills.
We use Weblate to localise this project into many languages. If you want to help translate Uptime Kuma into your language, please see [these instructions on how to translate using Weblate](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
There are some cases where a change cannot be done directly in Weblate and requires a PR:
- A text may not yet be localisable. In this case, **adding a new language key** via `{{ $t("Translation key") }}` or [`<i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html) might be necessary.
- Language keys need to be **added to `en.json`** to appear in Weblate. If this has not been done, a PR is appreciated.
- **Adding a new language** requires creating a new file. See [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
- A text may not yet be localisable. In this case, **adding a new language key** via `{{ $t("Translation key") }}` or [`<i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html) might be necessary.
- Language keys need to be **added to `en.json`** to appear in Weblate. If this has not been done, a PR is appreciated.
- **Adding a new language** requires creating a new file. See [these instructions](https://github.com/louislam/uptime-kuma/blob/master/src/lang/README.md).
<sub>Because maintainer time is precious, junior maintainers may merge uncontroversial PRs in this area.</sub>
</p>
@ -95,6 +98,7 @@ to review the appropriate one for your contribution.
<p>
To set up a new notification provider these files need to be modified/created:
- `server/notification-providers/PROVIDER_NAME.js` is where the heart of the
notification provider lives.
@ -131,6 +135,7 @@ to review the appropriate one for your contribution.
translations (`{{ $t("Translation key") }}`,
[`i18n-t keypath="Translation key">`](https://vue-i18n.intlify.dev/guide/advanced/component.html))
in `src/lang/en.json` to enable our translators to translate this
- `src/components/notifications/index.js` is where the frontend of the
provider needs to be registered. _If you have an idea how we can skip this
step, we would love to hear about it ^^_
@ -142,6 +147,7 @@ to review the appropriate one for your contribution.
To make sure you have tested the notification provider, please include
screenshots of the following events in the pull-request description:
- `UP`/`DOWN`
- Certificate Expiry via <https://expired.badssl.com/>
- Domain Expiry via <https://google.com/> and a larger time set
@ -153,7 +159,7 @@ to review the appropriate one for your contribution.
```md
| Event | Before | After |
| ------------------ | --------------------- | -------------------- |
|--------------------|-----------------------|----------------------|
| `UP` | ![Before](image-link) | ![After](image-link) |
| `DOWN` | ![Before](image-link) | ![After](image-link) |
| Certificate-expiry | ![Before](image-link) | ![After](image-link) |
@ -171,6 +177,7 @@ to review the appropriate one for your contribution.
<p>
To set up a new notification provider these files need to be modified/created:
- `server/monitor-types/MONITORING_TYPE.js` is the core of each monitor.
The `async check(...)`-function should:
- in the happy-path: set `heartbeat.msg` to a successful message and set `heartbeat.status = UP`
@ -213,6 +220,7 @@ to review the appropriate one for your contribution.
<p>
Contributing is easy and fun. We will guide you through the process:
1. **Fork** the [Uptime-Kuma repository](https://github.com/louislam/uptime-kuma/) and **clone** it to your local machine.
2. **Create a new branch** for your changes (e.g., `signal-notification-provider`).
3. **Make your changes** and **commit** them with a clear message.
@ -227,6 +235,7 @@ to review the appropriate one for your contribution.
A PR should remain in **draft status** until all tasks are completed.
Only change the status to **Ready for Review** when:
- You have implemented all planned changes.
- Your code is fully tested and ready for review.
- You have updated or created the necessary tests.
@ -239,6 +248,7 @@ to review the appropriate one for your contribution.
- Merging multiple issues by a huge PR is more difficult to review and causes
conflicts with other PRs. Please
- (if possible) **create one PR for one issue** or
- (if not possible) **explain which issues a PR addresses and why this PR
should not be broken apart**
@ -259,9 +269,7 @@ to review the appropriate one for your contribution.
### Continuous Integration
All pull requests must pass our continuous integration checks. These checks include:
- **Linting**: We use ESLint and Stylelint for code quality checks. You can run the linter locally with `npm run lint`.
- **Formatting**: We use Prettier for code formatting. You can format your code with `npm run fmt` (or CI will do this for you)
- **Linting**: We use ESLint and Stylelint to enforce code style. You can run the linter locally with `npm run lint`.
- **Testing**: We use Playwright for end-to-end tests and have a suite of backend tests. You can run the tests locally with `npm test`.
I ([@louislam](https://github.com/louislam)) have the final say.
@ -288,11 +296,13 @@ you can finally start the app. The goal is to make the Uptime Kuma installation
as easy as installing a mobile app.
- Easy to install for non-Docker users
- no native build dependency is needed (for `x86_64`/`armv7`/`arm64`)
- no extra configuration and
- no extra effort required to get it running
- Single container for Docker users
- no complex docker-compose file
- mapping the volume and exposing the port should be the only requirements
@ -469,16 +479,18 @@ We have a few procedures we follow. These are documented here:
- <details><summary><b>Set up a Docker Builder</b> (click to expand)</summary>
<p>
- amd64, armv7 using local.
- arm64 using remote arm64 cpu, as the emulator is too slow and can no longer
pass the `npm ci` command.
1. Add the public key to the remote server.
2. Add the remote context. The remote machine must be arm64 and installed
Docker CE.
```bash
docker context create oracle-arm64-jp --docker "host=ssh://root@100.107.174.88"
```
```bash
docker context create oracle-arm64-jp --docker "host=ssh://root@100.107.174.88"
```
3. Create a new builder.
@ -502,6 +514,7 @@ We have a few procedures we follow. These are documented here:
- <details><summary><b>Release</b> (click to expand)</summary>
<p>
1. Draft a release note
2. Make sure the repo is cleared
3. If the healthcheck is updated, remember to re-compile it:
@ -514,6 +527,7 @@ We have a few procedures we follow. These are documented here:
9. Deploy to the demo server: `npm run deploy-demo-server`
These Items need to be checked:
- [ ] Check all tags is fine on
<https://hub.docker.com/r/louislam/uptime-kuma/tags>
- [ ] Try the Docker image with tag 1.X.X (Clean install / amd64 / arm64 /
@ -525,6 +539,7 @@ We have a few procedures we follow. These are documented here:
- <details><summary><b>Release Beta</b> (click to expand)</summary>
<p>
1. Draft a release note, check `This is a pre-release`
2. Make sure the repo is cleared
3. `npm run release-beta` with env vars: `VERSION` and `GITHUB_TOKEN`

View File

@ -6,7 +6,7 @@
Uptime Kuma is an easy-to-use self-hosted monitoring tool.
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/2?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
<a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/stars/louislam/uptime-kuma?style=flat" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/pulls/louislam/uptime-kuma" /></a> <a target="_blank" href="https://hub.docker.com/r/louislam/uptime-kuma"><img src="https://img.shields.io/docker/v/louislam/uptime-kuma/latest?label=docker%20image%20ver." /></a> <a target="_blank" href="https://github.com/louislam/uptime-kuma"><img src="https://img.shields.io/github/last-commit/louislam/uptime-kuma" /></a> <a target="_blank" href="https://opencollective.com/uptime-kuma"><img src="https://opencollective.com/uptime-kuma/total/badge.svg?label=Open%20Collective%20Backers&color=brightgreen" /></a>
[![GitHub Sponsors](https://img.shields.io/github/sponsors/louislam?label=GitHub%20Sponsors)](https://github.com/sponsors/louislam) <a href="https://weblate.kuma.pet/projects/uptime-kuma/uptime-kuma/">
<img src="https://weblate.kuma.pet/widgets/uptime-kuma/-/svg-badge.svg" alt="Translation status" />
</a>
@ -45,7 +45,6 @@ cd uptime-kuma
curl -o compose.yaml https://raw.githubusercontent.com/louislam/uptime-kuma/master/compose.yaml
docker compose up -d
```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
> [!WARNING]
@ -56,7 +55,6 @@ Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001
```bash
docker run -d --restart=always -p 3001:3001 -v uptime-kuma:/app/data --name uptime-kuma louislam/uptime-kuma:2
```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
If you want to limit exposure to localhost only:
@ -65,6 +63,8 @@ If you want to limit exposure to localhost only:
docker run ... -p 127.0.0.1:3001:3001 ...
```
### 💪🏻 Non-Docker
Requirements:
@ -93,7 +93,6 @@ npm install pm2 -g && pm2 install pm2-logrotate
# Start Server
pm2 start server/server.js --name uptime-kuma
```
Uptime Kuma is now running on all network interfaces (e.g. http://localhost:3001 or http://your-ip:3001).
More useful PM2 Commands
@ -175,8 +174,8 @@ You can mention me if you ask a question on the subreddit.
### Create Pull Requests
Pull requests are awesome.
To keep reviews fast and effective, please make sure you've [read our pull request guidelines](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma).
We DO NOT accept all types of pull requests and do not want to waste your time. Please be sure that you have read and follow pull request rules:
[CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma](https://github.com/louislam/uptime-kuma/blob/master/CONTRIBUTING.md#can-i-create-a-pull-request-for-uptime-kuma)
### Test Pull Requests

View File

@ -10,7 +10,7 @@
- Do not report any upstream dependency issues / scan result by any tools. It will be closed immediately without explanations. Unless you have PoC to prove that the upstream issue affected Uptime Kuma.
- Do not use the public issue tracker or discuss it in public as it will cause
more damage.
more damage.
## Do you accept other 3rd-party bug bounty platforms?

View File

@ -0,0 +1,5 @@
// Jest configuration for the backend test-suite:
// - rootDir: the repository root (this config file lives one level below it)
// - testRegex: restricts Jest to the backend spec file only
module.exports = {
"rootDir": "..",
"testRegex": "./test/backend.spec.js",
};

View File

@ -22,11 +22,10 @@ export default defineConfig({
// Reporter to use
reporter: [
[
"html",
{
"html", {
outputFolder: "../private/playwright-report",
open: "never",
},
}
],
],
@ -48,7 +47,7 @@ export default defineConfig({
{
name: "specs",
use: { ...devices["Desktop Chrome"] },
dependencies: ["run-once setup"],
dependencies: [ "run-once setup" ],
},
/*
{

View File

@ -15,13 +15,13 @@ export default defineConfig({
port: 3000,
},
define: {
FRONTEND_VERSION: JSON.stringify(process.env.npm_package_version),
"FRONTEND_VERSION": JSON.stringify(process.env.npm_package_version),
"process.env": {},
},
plugins: [
vue(),
visualizer({
filename: "tmp/dist-stats.html",
filename: "tmp/dist-stats.html"
}),
viteCompression({
algorithm: "gzip",
@ -36,26 +36,25 @@ export default defineConfig({
srcDir: "src",
filename: "serviceWorker.ts",
strategies: "injectManifest",
injectManifest: {
maximumFileSizeToCacheInBytes: 3 * 1024 * 1024, // 3 MiB
},
}),
],
css: {
postcss: {
parser: postCssScss,
map: false,
plugins: [postcssRTLCSS],
},
"parser": postCssScss,
"map": false,
"plugins": [ postcssRTLCSS ]
}
},
build: {
commonjsOptions: {
include: [/.js$/],
include: [ /.js$/ ],
},
rollupOptions: {
output: {
manualChunks(id, { getModuleInfo, getModuleIds }) {},
},
manualChunks(id, { getModuleInfo, getModuleIds }) {
}
}
},
},
}
});

View File

@ -39,7 +39,7 @@ async function createTables() {
table.integer("user_id").unsigned().notNullable();
table.string("protocol", 10).notNullable();
table.string("host", 255).notNullable();
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
table.smallint("port").notNullable(); // TODO: Maybe a issue with MariaDB, need migration to int
table.boolean("auth").notNullable();
table.string("username", 255).nullable();
table.string("password", 255).nullable();
@ -67,7 +67,10 @@ async function createTables() {
table.increments("id");
table.string("name", 150);
table.boolean("active").notNullable().defaultTo(true);
table.integer("user_id").unsigned().references("id").inTable("user").onDelete("SET NULL").onUpdate("CASCADE");
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.integer("interval").notNullable().defaultTo(20);
table.text("url");
table.string("type", 20);
@ -80,7 +83,7 @@ async function createTables() {
table.boolean("ignore_tls").notNullable().defaultTo(false);
table.boolean("upside_down").notNullable().defaultTo(false);
table.integer("maxredirects").notNullable().defaultTo(10);
table.text("accepted_statuscodes_json").notNullable().defaultTo('["200-299"]');
table.text("accepted_statuscodes_json").notNullable().defaultTo("[\"200-299\"]");
table.string("dns_resolve_type", 5);
table.string("dns_resolve_server", 255);
table.string("dns_last_result", 255);
@ -91,9 +94,11 @@ async function createTables() {
table.text("headers").defaultTo(null);
table.text("basic_auth_user").defaultTo(null);
table.text("basic_auth_pass").defaultTo(null);
table.integer("docker_host").unsigned().references("id").inTable("docker_host");
table.integer("docker_host").unsigned()
.references("id").inTable("docker_host");
table.string("docker_container", 255);
table.integer("proxy_id").unsigned().references("id").inTable("proxy");
table.integer("proxy_id").unsigned()
.references("id").inTable("proxy");
table.boolean("expiry_notification").defaultTo(true);
table.text("mqtt_topic");
table.string("mqtt_success_message", 255);
@ -125,12 +130,8 @@ async function createTables() {
await knex.schema.createTable("heartbeat", (table) => {
table.increments("id");
table.boolean("important").notNullable().defaultTo(false);
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.smallint("status").notNullable();
@ -142,9 +143,9 @@ async function createTables() {
table.integer("down_count").notNullable().defaultTo(0);
table.index("important");
table.index(["monitor_id", "time"], "monitor_time_index");
table.index([ "monitor_id", "time" ], "monitor_time_index");
table.index("monitor_id");
table.index(["monitor_id", "important", "time"], "monitor_important_time_index");
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
});
// incident
@ -165,7 +166,10 @@ async function createTables() {
table.increments("id");
table.string("title", 150).notNullable();
table.text("description").notNullable();
table.integer("user_id").unsigned().references("id").inTable("user").onDelete("SET NULL").onUpdate("CASCADE");
table.integer("user_id").unsigned()
.references("id").inTable("user")
.onDelete("SET NULL")
.onUpdate("CASCADE");
table.boolean("active").notNullable().defaultTo(true);
table.string("strategy", 50).notNullable().defaultTo("single");
table.datetime("start_date");
@ -177,7 +181,7 @@ async function createTables() {
table.integer("interval_day");
table.index("active");
table.index(["strategy", "active"], "manual_active");
table.index([ "strategy", "active" ], "manual_active");
table.index("user_id", "maintenance_user_id");
});
@ -205,21 +209,13 @@ async function createTables() {
await knex.schema.createTable("maintenance_status_page", (table) => {
table.increments("id");
table
.integer("status_page_id")
.unsigned()
.notNullable()
.references("id")
.inTable("status_page")
table.integer("status_page_id").unsigned().notNullable()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table
.integer("maintenance_id")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
});
@ -227,12 +223,8 @@ async function createTables() {
// maintenance_timeslot
await knex.schema.createTable("maintenance_timeslot", (table) => {
table.increments("id");
table
.integer("maintenance_id")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.datetime("start_date").notNullable();
@ -240,51 +232,35 @@ async function createTables() {
table.boolean("generated_next").defaultTo(false);
table.index("maintenance_id");
table.index(["maintenance_id", "start_date", "end_date"], "active_timeslot_index");
table.index([ "maintenance_id", "start_date", "end_date" ], "active_timeslot_index");
table.index("generated_next", "generated_next_index");
});
// monitor_group
await knex.schema.createTable("monitor_group", (table) => {
table.increments("id");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table
.integer("group_id")
.unsigned()
.notNullable()
.references("id")
.inTable("group")
table.integer("group_id").unsigned().notNullable()
.references("id").inTable("group")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("weight").notNullable().defaultTo(1000);
table.boolean("send_url").notNullable().defaultTo(false);
table.index(["monitor_id", "group_id"], "fk");
table.index([ "monitor_id", "group_id" ], "fk");
});
// monitor_maintenance
await knex.schema.createTable("monitor_maintenance", (table) => {
table.increments("id");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table
.integer("maintenance_id")
.unsigned()
.notNullable()
.references("id")
.inTable("maintenance")
table.integer("maintenance_id").unsigned().notNullable()
.references("id").inTable("maintenance")
.onDelete("CASCADE")
.onUpdate("CASCADE");
@ -304,25 +280,17 @@ async function createTables() {
// monitor_notification
await knex.schema.createTable("monitor_notification", (table) => {
table.increments("id").unsigned(); // TODO: no auto increment????
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.increments("id").unsigned(); // TODO: no auto increment????
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table
.integer("notification_id")
.unsigned()
.notNullable()
.references("id")
.inTable("notification")
table.integer("notification_id").unsigned().notNullable()
.references("id").inTable("notification")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.index(["monitor_id", "notification_id"], "monitor_notification_index");
table.index([ "monitor_id", "notification_id" ], "monitor_notification_index");
});
// tag
@ -336,20 +304,12 @@ async function createTables() {
// monitor_tag
await knex.schema.createTable("monitor_tag", (table) => {
table.increments("id");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table
.integer("tag_id")
.unsigned()
.notNullable()
.references("id")
.inTable("tag")
table.integer("tag_id").unsigned().notNullable()
.references("id").inTable("tag")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("value");
@ -358,12 +318,8 @@ async function createTables() {
// monitor_tls_info
await knex.schema.createTable("monitor_tls_info", (table) => {
table.increments("id");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.text("info_json");
@ -375,8 +331,8 @@ async function createTables() {
table.string("type", 50).notNullable();
table.integer("monitor_id").unsigned().notNullable();
table.integer("days").notNullable();
table.unique(["type", "monitor_id", "days"]);
table.index(["type", "monitor_id", "days"], "good_index");
table.unique([ "type", "monitor_id", "days" ]);
table.index([ "type", "monitor_id", "days" ], "good_index");
});
// setting
@ -390,19 +346,16 @@ async function createTables() {
// status_page_cname
await knex.schema.createTable("status_page_cname", (table) => {
table.increments("id");
table
.integer("status_page_id")
.unsigned()
.references("id")
.inTable("status_page")
table.integer("status_page_id").unsigned()
.references("id").inTable("status_page")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.string("domain").notNullable().unique().collate("utf8_general_ci");
});
/*********************
* Converted Patch here
*********************/
* Converted Patch here
*********************/
// 2023-06-30-1348-http-body-encoding.js
// ALTER TABLE monitor ADD http_body_encoding VARCHAR(25);
@ -443,12 +396,8 @@ async function createTables() {
table.increments("id").primary();
table.string("key", 255).notNullable();
table.string("name", 255).notNullable();
table
.integer("user_id")
.unsigned()
.notNullable()
.references("id")
.inTable("user")
table.integer("user_id").unsigned().notNullable()
.references("id").inTable("user")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.dateTime("created_date").defaultTo(knex.fn.now()).notNullable();
@ -481,11 +430,13 @@ async function createTables() {
ALTER TABLE maintenance ADD timezone VARCHAR(255);
ALTER TABLE maintenance ADD duration INTEGER;
*/
await knex.schema.dropTableIfExists("maintenance_timeslot").table("maintenance", function (table) {
table.text("cron");
table.string("timezone", 255);
table.integer("duration");
});
await knex.schema
.dropTableIfExists("maintenance_timeslot")
.table("maintenance", function (table) {
table.text("cron");
table.string("timezone", 255);
table.integer("duration");
});
// 2023-06-30-1413-add-parent-monitor.js.
/*
@ -493,7 +444,10 @@ async function createTables() {
ADD parent INTEGER REFERENCES [monitor] ([id]) ON DELETE SET NULL ON UPDATE CASCADE;
*/
await knex.schema.table("monitor", function (table) {
table.integer("parent").unsigned().references("id").inTable("monitor").onDelete("SET NULL").onUpdate("CASCADE");
table.integer("parent").unsigned()
.references("id").inTable("monitor")
.onDelete("SET NULL")
.onUpdate("CASCADE");
});
/*

View File

@ -3,41 +3,39 @@ exports.up = function (knex) {
.createTable("stat_minutely", function (table) {
table.increments("id");
table.comment("This table contains the minutely aggregate statistics for each monitor");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest minute");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest minute");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique(["monitor_id", "timestamp"]);
table.unique([ "monitor_id", "timestamp" ]);
})
.createTable("stat_daily", function (table) {
table.increments("id");
table.comment("This table contains the daily aggregate statistics for each monitor");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest day");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest day");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique(["monitor_id", "timestamp"]);
table.unique([ "monitor_id", "timestamp" ]);
});
};
exports.down = function (knex) {
return knex.schema.dropTable("stat_minutely").dropTable("stat_daily");
return knex.schema
.dropTable("stat_minutely")
.dropTable("stat_daily");
};

View File

@ -1,13 +1,16 @@
exports.up = function (knex) {
// Add new column heartbeat.end_time
return knex.schema.alterTable("heartbeat", function (table) {
table.datetime("end_time").nullable().defaultTo(null);
});
return knex.schema
.alterTable("heartbeat", function (table) {
table.datetime("end_time").nullable().defaultTo(null);
});
};
exports.down = function (knex) {
// Rename heartbeat.start_time to heartbeat.time
return knex.schema.alterTable("heartbeat", function (table) {
table.dropColumn("end_time");
});
return knex.schema
.alterTable("heartbeat", function (table) {
table.dropColumn("end_time");
});
};

View File

@ -1,12 +1,15 @@
exports.up = function (knex) {
// Add new column heartbeat.retries
return knex.schema.alterTable("heartbeat", function (table) {
table.integer("retries").notNullable().defaultTo(0);
});
return knex.schema
.alterTable("heartbeat", function (table) {
table.integer("retries").notNullable().defaultTo(0);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("heartbeat", function (table) {
table.dropColumn("retries");
});
return knex.schema
.alterTable("heartbeat", function (table) {
table.dropColumn("retries");
});
};

View File

@ -1,13 +1,16 @@
exports.up = function (knex) {
// Add new column monitor.mqtt_check_type
return knex.schema.alterTable("monitor", function (table) {
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("mqtt_check_type", 255).notNullable().defaultTo("keyword");
});
};
exports.down = function (knex) {
// Drop column monitor.mqtt_check_type
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("mqtt_check_type");
});
return knex.schema
.alterTable("monitor", function (table) {
table.dropColumn("mqtt_check_type");
});
};

View File

@ -1,12 +1,14 @@
exports.up = function (knex) {
// update monitor.push_token to 32 length
return knex.schema.alterTable("monitor", function (table) {
table.string("push_token", 32).alter();
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("push_token", 32).alter();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("push_token", 20).alter();
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("push_token", 20).alter();
});
};

View File

@ -5,14 +5,9 @@ exports.up = function (knex) {
table.string("name", 255).notNullable();
table.string("url", 255).notNullable();
table.integer("user_id").unsigned();
})
.alterTable("monitor", function (table) {
}).alterTable("monitor", function (table) {
// Add new column monitor.remote_browser
table
.integer("remote_browser")
.nullable()
.defaultTo(null)
.unsigned()
table.integer("remote_browser").nullable().defaultTo(null).unsigned()
.index()
.references("id")
.inTable("remote_browser");

View File

@ -1,7 +1,8 @@
exports.up = function (knex) {
return knex.schema.alterTable("status_page", function (table) {
table.integer("auto_refresh_interval").defaultTo(300).unsigned();
});
return knex.schema
.alterTable("status_page", function (table) {
table.integer("auto_refresh_interval").defaultTo(300).unsigned();
});
};
exports.down = function (knex) {

View File

@ -1,29 +1,14 @@
exports.up = function (knex) {
return knex.schema
.alterTable("stat_daily", function (table) {
table
.float("ping_min")
.notNullable()
.defaultTo(0)
.comment("Minimum ping during this period in milliseconds");
table
.float("ping_max")
.notNullable()
.defaultTo(0)
.comment("Maximum ping during this period in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
})
.alterTable("stat_minutely", function (table) {
table
.float("ping_min")
.notNullable()
.defaultTo(0)
.comment("Minimum ping during this period in milliseconds");
table
.float("ping_max")
.notNullable()
.defaultTo(0)
.comment("Maximum ping during this period in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
});
};
exports.down = function (knex) {

View File

@ -1,26 +1,26 @@
exports.up = function (knex) {
return knex.schema.createTable("stat_hourly", function (table) {
table.increments("id");
table.comment("This table contains the hourly aggregate statistics for each monitor");
table
.integer("monitor_id")
.unsigned()
.notNullable()
.references("id")
.inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp").notNullable().comment("Unix timestamp rounded down to the nearest hour");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
return knex.schema
.createTable("stat_hourly", function (table) {
table.increments("id");
table.comment("This table contains the hourly aggregate statistics for each monitor");
table.integer("monitor_id").unsigned().notNullable()
.references("id").inTable("monitor")
.onDelete("CASCADE")
.onUpdate("CASCADE");
table.integer("timestamp")
.notNullable()
.comment("Unix timestamp rounded down to the nearest hour");
table.float("ping").notNullable().comment("Average ping in milliseconds");
table.float("ping_min").notNullable().defaultTo(0).comment("Minimum ping during this period in milliseconds");
table.float("ping_max").notNullable().defaultTo(0).comment("Maximum ping during this period in milliseconds");
table.smallint("up").notNullable();
table.smallint("down").notNullable();
table.unique(["monitor_id", "timestamp"]);
});
table.unique([ "monitor_id", "timestamp" ]);
});
};
exports.down = function (knex) {
return knex.schema.dropTable("stat_hourly");
return knex.schema
.dropTable("stat_hourly");
};

View File

@ -9,6 +9,7 @@ exports.up = function (knex) {
.alterTable("stat_hourly", function (table) {
table.text("extras").defaultTo(null).comment("Extra statistics during this time period");
});
};
exports.down = function (knex) {

View File

@ -1,9 +1,10 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("snmp_oid").defaultTo(null);
table.enum("snmp_version", ["1", "2c", "3"]).defaultTo("2c");
table.string("json_path_operator").defaultTo(null);
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("snmp_oid").defaultTo(null);
table.enum("snmp_version", [ "1", "2c", "3" ]).defaultTo("2c");
table.string("json_path_operator").defaultTo(null);
});
};
exports.down = function (knex) {

View File

@ -1,11 +1,13 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("cache_bust").notNullable().defaultTo(false);
});
return knex.schema
.alterTable("monitor", function (table) {
table.boolean("cache_bust").notNullable().defaultTo(false);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("cache_bust");
});
return knex.schema
.alterTable("monitor", function (table) {
table.dropColumn("cache_bust");
});
};

View File

@ -1,7 +1,8 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.text("conditions").notNullable().defaultTo("[]");
});
return knex.schema
.alterTable("monitor", function (table) {
table.text("conditions").notNullable().defaultTo("[]");
});
};
exports.down = function (knex) {

View File

@ -4,6 +4,7 @@ exports.up = function (knex) {
table.string("rabbitmq_username");
table.string("rabbitmq_password");
});
};
exports.down = function (knex) {
@ -12,4 +13,5 @@ exports.down = function (knex) {
table.dropColumn("rabbitmq_username");
table.dropColumn("rabbitmq_password");
});
};

View File

@ -1,8 +1,9 @@
// Update info_json column to LONGTEXT mainly for MariaDB
exports.up = function (knex) {
return knex.schema.alterTable("monitor_tls_info", function (table) {
table.text("info_json", "longtext").alter();
});
return knex.schema
.alterTable("monitor_tls_info", function (table) {
table.text("info_json", "longtext").alter();
});
};
exports.down = function (knex) {

View File

@ -1,7 +1,8 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("smtp_security").defaultTo(null);
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("smtp_security").defaultTo(null);
});
};
exports.down = function (knex) {

View File

@ -1,9 +1,10 @@
// Add websocket ignore headers and websocket subprotocol
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("ws_ignore_sec_websocket_accept_header").notNullable().defaultTo(false);
table.string("ws_subprotocol", 255).notNullable().defaultTo("");
});
return knex.schema
.alterTable("monitor", function (table) {
table.boolean("ws_ignore_sec_websocket_accept_header").notNullable().defaultTo(false);
table.string("ws_subprotocol", 255).notNullable().defaultTo("");
});
};
exports.down = function (knex) {

View File

@ -4,12 +4,12 @@ exports.up = function (knex) {
.alterTable("status_page", function (table) {
table.renameColumn("google_analytics_tag_id", "analytics_id");
table.string("analytics_script_url");
table.enu("analytics_type", ["google", "umami", "plausible", "matomo"]).defaultTo(null);
})
.then(() => {
table.enu("analytics_type", [ "google", "umami", "plausible", "matomo" ]).defaultTo(null);
}).then(() => {
// After a succesful migration, add google as default for previous pages
knex("status_page").whereNotNull("analytics_id").update({
analytics_type: "google",
"analytics_type": "google",
});
});
};

View File

@ -5,17 +5,20 @@ ALTER TABLE monitor ADD ping_per_request_timeout INTEGER default 2 not null;
*/
exports.up = function (knex) {
// Add new columns to table monitor
return knex.schema.alterTable("monitor", function (table) {
table.integer("ping_count").defaultTo(1).notNullable();
table.boolean("ping_numeric").defaultTo(true).notNullable();
table.integer("ping_per_request_timeout").defaultTo(2).notNullable();
});
return knex.schema
.alterTable("monitor", function (table) {
table.integer("ping_count").defaultTo(1).notNullable();
table.boolean("ping_numeric").defaultTo(true).notNullable();
table.integer("ping_per_request_timeout").defaultTo(2).notNullable();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("ping_count");
table.dropColumn("ping_numeric");
table.dropColumn("ping_per_request_timeout");
});
return knex.schema
.alterTable("monitor", function (table) {
table.dropColumn("ping_count");
table.dropColumn("ping_numeric");
table.dropColumn("ping_per_request_timeout");
});
};

View File

@ -1,8 +1,9 @@
// Fix #5721: Change proxy port column type to integer to support larger port numbers
exports.up = function (knex) {
return knex.schema.alterTable("proxy", function (table) {
table.integer("port").alter();
});
return knex.schema
.alterTable("proxy", function (table) {
table.integer("port").alter();
});
};
exports.down = function (knex) {

View File

@ -1,8 +1,9 @@
// Add column custom_url to monitor_group table
exports.up = function (knex) {
return knex.schema.alterTable("monitor_group", function (table) {
table.text("custom_url", "text");
});
return knex.schema
.alterTable("monitor_group", function (table) {
table.text("custom_url", "text");
});
};
exports.down = function (knex) {

View File

@ -1,11 +1,13 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("ip_family").defaultTo(null);
});
return knex.schema
.alterTable("monitor", function (table) {
table.boolean("ip_family").defaultTo(null);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("ip_family");
});
return knex.schema
.alterTable("monitor", function (table) {
table.dropColumn("ip_family");
});
};

View File

@ -1,7 +1,8 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("manual_status").defaultTo(null);
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("manual_status").defaultTo(null);
});
};
exports.down = function (knex) {

View File

@ -1,27 +1,28 @@
// Add column last_start_date to maintenance table
exports.up = async function (knex) {
await knex.schema.alterTable("maintenance", function (table) {
table.datetime("last_start_date");
});
await knex.schema
.alterTable("maintenance", function (table) {
table.datetime("last_start_date");
});
// Perform migration for recurring-interval strategy
const recurringMaintenances = await knex("maintenance")
.where({
strategy: "recurring-interval",
cron: "* * * * *",
})
.select("id", "start_time");
const recurringMaintenances = await knex("maintenance").where({
strategy: "recurring-interval",
cron: "* * * * *"
}).select("id", "start_time");
// eslint-disable-next-line camelcase
const maintenanceUpdates = recurringMaintenances.map(async ({ start_time, id }) => {
// eslint-disable-next-line camelcase
const [hourStr, minuteStr] = start_time.split(":");
const [ hourStr, minuteStr ] = start_time.split(":");
const hour = parseInt(hourStr, 10);
const minute = parseInt(minuteStr, 10);
const cron = `${minute} ${hour} * * *`;
await knex("maintenance").where({ id }).update({ cron });
await knex("maintenance")
.where({ id })
.update({ cron });
});
await Promise.all(maintenanceUpdates);
};

View File

@ -1,8 +1,9 @@
// Fix: Change manual_status column type to smallint
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.smallint("manual_status").alter();
});
return knex.schema
.alterTable("monitor", function (table) {
table.smallint("manual_status").alter();
});
};
exports.down = function (knex) {

View File

@ -1,7 +1,8 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("oauth_audience").nullable().defaultTo(null);
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("oauth_audience").nullable().defaultTo(null);
});
};
exports.down = function (knex) {

View File

@ -1,13 +1,15 @@
exports.up = function (knex) {
// Add new column monitor.mqtt_websocket_path
return knex.schema.alterTable("monitor", function (table) {
table.string("mqtt_websocket_path", 255).nullable();
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("mqtt_websocket_path", 255).nullable();
});
};
exports.down = function (knex) {
// Drop column monitor.mqtt_websocket_path
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("mqtt_websocket_path");
});
return knex.schema
.alterTable("monitor", function (table) {
table.dropColumn("mqtt_websocket_path");
});
};

View File

@ -1,14 +1,16 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
// Fix ip_family, change to varchar instead of boolean
// possible values are "ipv4" and "ipv6"
table.string("ip_family", 4).defaultTo(null).alter();
});
return knex.schema
.alterTable("monitor", function (table) {
// Fix ip_family, change to varchar instead of boolean
// possible values are "ipv4" and "ipv6"
table.string("ip_family", 4).defaultTo(null).alter();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
// Rollback to boolean
table.boolean("ip_family").defaultTo(null).alter();
});
return knex.schema
.alterTable("monitor", function (table) {
// Rollback to boolean
table.boolean("ip_family").defaultTo(null).alter();
});
};

View File

@ -1,15 +0,0 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.boolean("save_response").notNullable().defaultTo(false);
table.boolean("save_error_response").notNullable().defaultTo(true);
table.integer("response_max_length").notNullable().defaultTo(1024); // Default 1KB
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("save_response");
table.dropColumn("save_error_response");
table.dropColumn("response_max_length");
});
};

View File

@ -1,11 +0,0 @@
exports.up = function (knex) {
return knex.schema.alterTable("heartbeat", function (table) {
table.text("response").nullable().defaultTo(null);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("heartbeat", function (table) {
table.dropColumn("response");
});
};

View File

@ -1,13 +1,15 @@
exports.up = function (knex) {
// Add new column status_page.show_only_last_heartbeat
return knex.schema.alterTable("status_page", function (table) {
table.boolean("show_only_last_heartbeat").notNullable().defaultTo(false);
});
return knex.schema
.alterTable("status_page", function (table) {
table.boolean("show_only_last_heartbeat").notNullable().defaultTo(false);
});
};
exports.down = function (knex) {
// Drop column status_page.show_only_last_heartbeat
return knex.schema.alterTable("status_page", function (table) {
table.dropColumn("show_only_last_heartbeat");
});
return knex.schema
.alterTable("status_page", function (table) {
table.dropColumn("show_only_last_heartbeat");
});
};

View File

@ -9,11 +9,11 @@ exports.up = async function (knex) {
// Create partial indexes with predicate
await knex.schema.alterTable("heartbeat", function (table) {
table.index(["monitor_id", "time"], "monitor_important_time_index", {
predicate: knex.whereRaw("important = 1"),
table.index([ "monitor_id", "time" ], "monitor_important_time_index", {
predicate: knex.whereRaw("important = 1")
});
table.index(["important"], "heartbeat_important_index", {
predicate: knex.whereRaw("important = 1"),
table.index([ "important" ], "heartbeat_important_index", {
predicate: knex.whereRaw("important = 1")
});
});
}
@ -29,8 +29,8 @@ exports.down = async function (knex) {
await knex.raw("DROP INDEX IF EXISTS heartbeat_important_index");
await knex.schema.alterTable("heartbeat", function (table) {
table.index(["monitor_id", "important", "time"], "monitor_important_time_index");
table.index(["important"]);
table.index([ "monitor_id", "important", "time" ], "monitor_important_time_index");
table.index([ "important" ]);
});
}
// For MariaDB/MySQL: No changes

View File

@ -1,11 +0,0 @@
exports.up = async function (knex) {
await knex.schema.alterTable("monitor", (table) => {
table.string("snmp_v3_username", 255);
});
};
exports.down = async function (knex) {
await knex.schema.alterTable("monitor", (table) => {
table.dropColumn("snmp_v3_username");
});
};

View File

@ -1,12 +1,14 @@
// Change dns_last_result column from VARCHAR(255) to TEXT to handle longer DNS TXT records
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.text("dns_last_result").alter();
});
return knex.schema
.alterTable("monitor", function (table) {
table.text("dns_last_result").alter();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.string("dns_last_result", 255).alter();
});
return knex.schema
.alterTable("monitor", function (table) {
table.string("dns_last_result", 255).alter();
});
};

View File

@ -3,139 +3,139 @@
// Lookup table mapping v4 game IDs to v5 game IDs
const gameDig4to5IdMap = {
americasarmypg: "aapg",
"americasarmypg": "aapg",
"7d2d": "sdtd",
as: "actionsource",
ageofchivalry: "aoc",
arkse: "ase",
arcasimracing: "asr08",
arma: "aaa",
arma2oa: "a2oa",
armacwa: "acwa",
armar: "armaresistance",
armare: "armareforger",
armagetron: "armagetronadvanced",
bat1944: "battalion1944",
bf1942: "battlefield1942",
bfv: "battlefieldvietnam",
bf2: "battlefield2",
bf2142: "battlefield2142",
bfbc2: "bbc2",
bf3: "battlefield3",
bf4: "battlefield4",
bfh: "battlefieldhardline",
bd: "basedefense",
bs: "bladesymphony",
buildandshoot: "bas",
cod4: "cod4mw",
callofjuarez: "coj",
chivalry: "cmw",
commandos3: "c3db",
cacrenegade: "cacr",
contactjack: "contractjack",
cs15: "counterstrike15",
cs16: "counterstrike16",
cs2: "counterstrike2",
crossracing: "crce",
darkesthour: "dhe4445",
daysofwar: "dow",
deadlydozenpt: "ddpt",
dh2005: "deerhunter2005",
dinodday: "ddd",
dirttrackracing2: "dtr2",
dmc: "deathmatchclassic",
dnl: "dal",
drakan: "dootf",
dys: "dystopia",
em: "empiresmod",
empyrion: "egs",
f12002: "formulaone2002",
flashpointresistance: "ofr",
fivem: "gta5f",
forrest: "theforrest",
graw: "tcgraw",
graw2: "tcgraw2",
giantscitizenkabuto: "gck",
ges: "goldeneyesource",
gore: "gus",
hldm: "hld",
hldms: "hlds",
hlopfor: "hlof",
hl2dm: "hl2d",
hidden: "thehidden",
had2: "hiddendangerous2",
igi2: "i2cs",
il2: "il2sturmovik",
insurgencymic: "imic",
isle: "theisle",
jamesbondnightfire: "jb007n",
jc2mp: "jc2m",
jc3mp: "jc3m",
kingpin: "kloc",
kisspc: "kpctnc",
kspdmp: "kspd",
kzmod: "kreedzclimbing",
left4dead: "l4d",
left4dead2: "l4d2",
m2mp: "m2m",
mohsh: "mohaas",
mohbt: "mohaab",
mohab: "moha",
moh2010: "moh",
mohwf: "mohw",
minecraftbe: "mbe",
mtavc: "gtavcmta",
mtasa: "gtasamta",
ns: "naturalselection",
ns2: "naturalselection2",
nwn: "neverwinternights",
nwn2: "neverwinternights2",
nolf: "tonolf",
nolf2: "nolf2asihw",
pvkii: "pvak2",
ps: "postscriptum",
primalcarnage: "pce",
pc: "projectcars",
pc2: "projectcars2",
prbf2: "prb2",
przomboid: "projectzomboid",
quake1: "quake",
quake3: "q3a",
ragdollkungfu: "rdkf",
r6: "rainbowsix",
r6roguespear: "rs2rs",
r6ravenshield: "rs3rs",
redorchestraost: "roo4145",
redm: "rdr2r",
riseofnations: "ron",
rs2: "rs2v",
samp: "gtasam",
saomp: "gtasao",
savage2: "s2ats",
ss: "serioussam",
ss2: "serioussam2",
ship: "theship",
sinep: "sinepisodes",
sonsoftheforest: "sotf",
swbf: "swb",
swbf2: "swb2",
swjk: "swjkja",
swjk2: "swjk2jo",
takeonhelicopters: "toh",
tf2: "teamfortress2",
terraria: "terrariatshock",
tribes1: "t1s",
ut: "unrealtournament",
ut2003: "unrealtournament2003",
ut2004: "unrealtournament2004",
ut3: "unrealtournament3",
v8supercar: "v8sc",
vcmp: "vcm",
vs: "vampireslayer",
wheeloftime: "wot",
wolfenstein2009: "wolfenstein",
wolfensteinet: "wet",
wurm: "wurmunlimited",
"as": "actionsource",
"ageofchivalry": "aoc",
"arkse": "ase",
"arcasimracing": "asr08",
"arma": "aaa",
"arma2oa": "a2oa",
"armacwa": "acwa",
"armar": "armaresistance",
"armare": "armareforger",
"armagetron": "armagetronadvanced",
"bat1944": "battalion1944",
"bf1942": "battlefield1942",
"bfv": "battlefieldvietnam",
"bf2": "battlefield2",
"bf2142": "battlefield2142",
"bfbc2": "bbc2",
"bf3": "battlefield3",
"bf4": "battlefield4",
"bfh": "battlefieldhardline",
"bd": "basedefense",
"bs": "bladesymphony",
"buildandshoot": "bas",
"cod4": "cod4mw",
"callofjuarez": "coj",
"chivalry": "cmw",
"commandos3": "c3db",
"cacrenegade": "cacr",
"contactjack": "contractjack",
"cs15": "counterstrike15",
"cs16": "counterstrike16",
"cs2": "counterstrike2",
"crossracing": "crce",
"darkesthour": "dhe4445",
"daysofwar": "dow",
"deadlydozenpt": "ddpt",
"dh2005": "deerhunter2005",
"dinodday": "ddd",
"dirttrackracing2": "dtr2",
"dmc": "deathmatchclassic",
"dnl": "dal",
"drakan": "dootf",
"dys": "dystopia",
"em": "empiresmod",
"empyrion": "egs",
"f12002": "formulaone2002",
"flashpointresistance": "ofr",
"fivem": "gta5f",
"forrest": "theforrest",
"graw": "tcgraw",
"graw2": "tcgraw2",
"giantscitizenkabuto": "gck",
"ges": "goldeneyesource",
"gore": "gus",
"hldm": "hld",
"hldms": "hlds",
"hlopfor": "hlof",
"hl2dm": "hl2d",
"hidden": "thehidden",
"had2": "hiddendangerous2",
"igi2": "i2cs",
"il2": "il2sturmovik",
"insurgencymic": "imic",
"isle": "theisle",
"jamesbondnightfire": "jb007n",
"jc2mp": "jc2m",
"jc3mp": "jc3m",
"kingpin": "kloc",
"kisspc": "kpctnc",
"kspdmp": "kspd",
"kzmod": "kreedzclimbing",
"left4dead": "l4d",
"left4dead2": "l4d2",
"m2mp": "m2m",
"mohsh": "mohaas",
"mohbt": "mohaab",
"mohab": "moha",
"moh2010": "moh",
"mohwf": "mohw",
"minecraftbe": "mbe",
"mtavc": "gtavcmta",
"mtasa": "gtasamta",
"ns": "naturalselection",
"ns2": "naturalselection2",
"nwn": "neverwinternights",
"nwn2": "neverwinternights2",
"nolf": "tonolf",
"nolf2": "nolf2asihw",
"pvkii": "pvak2",
"ps": "postscriptum",
"primalcarnage": "pce",
"pc": "projectcars",
"pc2": "projectcars2",
"prbf2": "prb2",
"przomboid": "projectzomboid",
"quake1": "quake",
"quake3": "q3a",
"ragdollkungfu": "rdkf",
"r6": "rainbowsix",
"r6roguespear": "rs2rs",
"r6ravenshield": "rs3rs",
"redorchestraost": "roo4145",
"redm": "rdr2r",
"riseofnations": "ron",
"rs2": "rs2v",
"samp": "gtasam",
"saomp": "gtasao",
"savage2": "s2ats",
"ss": "serioussam",
"ss2": "serioussam2",
"ship": "theship",
"sinep": "sinepisodes",
"sonsoftheforest": "sotf",
"swbf": "swb",
"swbf2": "swb2",
"swjk": "swjkja",
"swjk2": "swjk2jo",
"takeonhelicopters": "toh",
"tf2": "teamfortress2",
"terraria": "terrariatshock",
"tribes1": "t1s",
"ut": "unrealtournament",
"ut2003": "unrealtournament2003",
"ut2004": "unrealtournament2004",
"ut3": "unrealtournament3",
"v8supercar": "v8sc",
"vcmp": "vcm",
"vs": "vampireslayer",
"wheeloftime": "wot",
"wolfenstein2009": "wolfenstein",
"wolfensteinet": "wet",
"wurm": "wurmunlimited",
};
/**
@ -146,7 +146,10 @@ const gameDig4to5IdMap = {
exports.up = async function (knex) {
await knex.transaction(async (trx) => {
// Get all monitors that use the gamedig type
const monitors = await trx("monitor").select("id", "game").where("type", "gamedig").whereNotNull("game");
const monitors = await trx("monitor")
.select("id", "game")
.where("type", "gamedig")
.whereNotNull("game");
// Update each monitor with the new game ID if it needs migration
for (const monitor of monitors) {
@ -154,7 +157,9 @@ exports.up = async function (knex) {
const newGameId = gameDig4to5IdMap[oldGameId];
if (newGameId) {
await trx("monitor").where("id", monitor.id).update({ game: newGameId });
await trx("monitor")
.where("id", monitor.id)
.update({ game: newGameId });
}
}
});
@ -167,11 +172,16 @@ exports.up = async function (knex) {
*/
exports.down = async function (knex) {
// Create reverse mapping from the same LUT
const gameDig5to4IdMap = Object.fromEntries(Object.entries(gameDig4to5IdMap).map(([v4, v5]) => [v5, v4]));
const gameDig5to4IdMap = Object.fromEntries(
Object.entries(gameDig4to5IdMap).map(([ v4, v5 ]) => [ v5, v4 ])
);
await knex.transaction(async (trx) => {
// Get all monitors that use the gamedig type
const monitors = await trx("monitor").select("id", "game").where("type", "gamedig").whereNotNull("game");
const monitors = await trx("monitor")
.select("id", "game")
.where("type", "gamedig")
.whereNotNull("game");
// Revert each monitor back to the old game ID if it was migrated
for (const monitor of monitors) {
@ -179,7 +189,9 @@ exports.down = async function (knex) {
const oldGameId = gameDig5to4IdMap[newGameId];
if (oldGameId) {
await trx("monitor").where("id", monitor.id).update({ game: oldGameId });
await trx("monitor")
.where("id", monitor.id)
.update({ game: oldGameId });
}
}
});

View File

@ -1,43 +0,0 @@
exports.up = function (knex) {
return knex.schema
.alterTable("heartbeat", function (table) {
table.bigInteger("ping").alter();
})
.alterTable("stat_minutely", function (table) {
table.float("ping", 20, 2).notNullable().alter();
table.float("ping_min", 20, 2).notNullable().defaultTo(0).alter();
table.float("ping_max", 20, 2).notNullable().defaultTo(0).alter();
})
.alterTable("stat_daily", function (table) {
table.float("ping", 20, 2).notNullable().alter();
table.float("ping_min", 20, 2).notNullable().defaultTo(0).alter();
table.float("ping_max", 20, 2).notNullable().defaultTo(0).alter();
})
.alterTable("stat_hourly", function (table) {
table.float("ping", 20, 2).notNullable().alter();
table.float("ping_min", 20, 2).notNullable().defaultTo(0).alter();
table.float("ping_max", 20, 2).notNullable().defaultTo(0).alter();
});
};
exports.down = function (knex) {
return knex.schema
.alterTable("heartbeat", function (table) {
table.integer("ping").alter();
})
.alterTable("stat_minutely", function (table) {
table.float("ping").notNullable().alter();
table.float("ping_min").notNullable().defaultTo(0).alter();
table.float("ping_max").notNullable().defaultTo(0).alter();
})
.alterTable("stat_daily", function (table) {
table.float("ping").notNullable().alter();
table.float("ping_min").notNullable().defaultTo(0).alter();
table.float("ping_max").notNullable().defaultTo(0).alter();
})
.alterTable("stat_hourly", function (table) {
table.float("ping").notNullable().alter();
table.float("ping_min").notNullable().defaultTo(0).alter();
table.float("ping_max").notNullable().defaultTo(0).alter();
});
};

View File

@ -1,12 +0,0 @@
exports.up = function (knex) {
// Add new column to table monitor for json-query retry behavior
return knex.schema.alterTable("monitor", function (table) {
table.boolean("retry_only_on_status_code_failure").defaultTo(false).notNullable();
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("retry_only_on_status_code_failure");
});
};

View File

@ -1,11 +0,0 @@
exports.up = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.integer("screenshot_delay").notNullable().unsigned().defaultTo(0);
});
};
exports.down = function (knex) {
return knex.schema.alterTable("monitor", function (table) {
table.dropColumn("screenshot_delay");
});
};

View File

@ -11,9 +11,13 @@ https://knexjs.org/guide/migrations.html#knexfile-in-other-languages
## Template
```js
exports.up = function (knex) {};
exports.up = function(knex) {
exports.down = function (knex) {};
};
exports.down = function(knex) {
};
// exports.config = { transaction: false };
```
@ -23,28 +27,29 @@ exports.down = function (knex) {};
Filename: 2023-06-30-1348-create-user-and-product.js
```js
exports.up = function (knex) {
exports.up = function(knex) {
return knex.schema
.createTable("user", function (table) {
table.increments("id");
table.string("first_name", 255).notNullable();
table.string("last_name", 255).notNullable();
.createTable('user', function (table) {
table.increments('id');
table.string('first_name', 255).notNullable();
table.string('last_name', 255).notNullable();
})
.createTable("product", function (table) {
table.increments("id");
table.decimal("price").notNullable();
table.string("name", 1000).notNullable();
})
.then(() => {
knex("products").insert([
{ price: 10, name: "Apple" },
{ price: 20, name: "Orange" },
]);
.createTable('product', function (table) {
table.increments('id');
table.decimal('price').notNullable();
table.string('name', 1000).notNullable();
}).then(() => {
knex("products").insert([
{ price: 10, name: "Apple" },
{ price: 20, name: "Orange" },
]);
});
};
exports.down = function (knex) {
return knex.schema.dropTable("product").dropTable("user");
exports.down = function(knex) {
return knex.schema
.dropTable("product")
.dropTable("user");
};
```

View File

@ -1,4 +1,4 @@
version: "3.8"
version: '3.8'
services:
uptime-kuma:
@ -9,5 +9,6 @@ services:
- ../server:/app/server
- ../db:/app/db
ports:
- "3001:3001" # <Host Port>:<Container Port>
- "3001:3001" # <Host Port>:<Container Port>
- "3307:3306"

View File

@ -1,8 +1,6 @@
module.exports = {
apps: [
{
name: "uptime-kuma",
script: "./server/server.js",
},
],
apps: [{
name: "uptime-kuma",
script: "./server/server.js",
}]
};

View File

@ -1,6 +1,3 @@
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const pkg = require("../../package.json");
const fs = require("fs");
const childProcess = require("child_process");
@ -19,26 +16,27 @@ if (!version || !version.includes("-beta.")) {
const exists = tagExists(version);
if (!exists) {
if (! exists) {
// Process package.json
pkg.version = version;
fs.writeFileSync("package.json", JSON.stringify(pkg, null, 4) + "\n");
// Also update package-lock.json
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const resultVersion = childProcess.spawnSync(npm, ["--no-git-tag-version", "version", version], { shell: true });
const resultVersion = childProcess.spawnSync(npm, [ "--no-git-tag-version", "version", version ], { shell: true });
if (resultVersion.error) {
console.error(resultVersion.error);
console.error("error npm version!");
process.exit(1);
}
const resultInstall = childProcess.spawnSync(npm, ["install"], { shell: true });
const resultInstall = childProcess.spawnSync(npm, [ "install" ], { shell: true });
if (resultInstall.error) {
console.error(resultInstall.error);
console.error("error update package-lock!");
process.exit(1);
}
commit(version);
} else {
console.log("version tag exists, please delete the tag or use another tag");
process.exit(1);
@ -53,7 +51,7 @@ if (!exists) {
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", ["commit", "-m", msg, "-a"]);
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
@ -61,13 +59,8 @@ function commit(version) {
throw new Error("commit error");
}
// Get the current branch name
res = childProcess.spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"]);
let branchName = res.stdout.toString().trim();
console.log("Current branch:", branchName);
// Git push the branch
childProcess.spawnSync("git", ["push", "origin", branchName, "--force"], { stdio: "inherit" });
res = childProcess.spawnSync("git", [ "push", "origin", "master" ]);
console.log(res.stdout.toString().trim());
}
/**
@ -77,11 +70,11 @@ function commit(version) {
* @throws Version is not valid
*/
function tagExists(version) {
if (!version) {
if (! version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", ["tag", "-l", version]);
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version;
}

View File

@ -14,9 +14,7 @@ if (platform === "linux/arm/v7") {
console.log("Already built in the host, skip.");
process.exit(0);
} else {
console.log(
"prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build."
);
console.log("prebuilt not found, it will be slow! You should execute `npm run build-healthcheck-armv7` before build.");
}
} else {
if (fs.existsSync("./extra/healthcheck-armv7")) {
@ -26,3 +24,4 @@ if (platform === "linux/arm/v7") {
const output = childProcess.execSync("go build -x -o ./extra/healthcheck ./extra/healthcheck.go").toString("utf8");
console.log(output);

View File

@ -18,7 +18,7 @@ const github = require("@actions/github");
await client.issues.listLabelsOnIssue({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
issue_number: issue.number
})
).data.map(({ name }) => name);
@ -29,7 +29,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
labels: ["invalid-format"],
labels: [ "invalid-format" ]
});
// Add the issue closing comment
@ -37,7 +37,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues and use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank Issues do not contain the context necessary for a good discussions.`,
body: `@${username}: Hello! :wave:\n\nThis issue is being automatically closed because it does not follow the issue template. Please **DO NOT open blank issues and use our [issue-templates](https://github.com/louislam/uptime-kuma/issues/new/choose) instead**.\nBlank Issues do not contain the context necessary for a good discussions.`
});
// Close the issue
@ -45,7 +45,7 @@ const github = require("@actions/github");
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
state: "closed",
state: "closed"
});
} else {
console.log("Pass!");
@ -53,4 +53,5 @@ const github = require("@actions/github");
} catch (e) {
console.log(e);
}
})();

View File

@ -1,7 +1,8 @@
require("dotenv").config();
const { NodeSSH } = require("node-ssh");
const readline = require("readline");
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
const rl = readline.createInterface({ input: process.stdin,
output: process.stdout });
const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
(async () => {
@ -12,7 +13,7 @@ const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
host: process.env.UPTIME_KUMA_DEMO_HOST,
port: process.env.UPTIME_KUMA_DEMO_PORT,
username: process.env.UPTIME_KUMA_DEMO_USERNAME,
privateKeyPath: process.env.UPTIME_KUMA_DEMO_PRIVATE_KEY_PATH,
privateKeyPath: process.env.UPTIME_KUMA_DEMO_PRIVATE_KEY_PATH
});
let cwd = process.env.UPTIME_KUMA_DEMO_CWD;
@ -47,6 +48,7 @@ const prompt = (query) => new Promise((resolve) => rl.question(query, resolve));
cwd,
});
console.log(result.stdout + result.stderr);*/
} catch (e) {
console.log(e);
} finally {

View File

@ -26,6 +26,7 @@ function download(url) {
console.log("Extracting dist...");
if (fs.existsSync("./dist")) {
if (fs.existsSync("./dist-backup")) {
fs.rmSync("./dist-backup", {
recursive: true,

View File

@ -4,13 +4,22 @@
import * as childProcess from "child_process";
const ignoreList = ["louislam", "CommanderStorm", "UptimeKumaBot", "weblate", "Copilot", "@autofix-ci[bot]"];
const ignoreList = [
"louislam",
"CommanderStorm",
"UptimeKumaBot",
"weblate",
"Copilot"
];
const mergeList = ["Translations Update from Weblate", "Update dependencies"];
const mergeList = [
"Translations Update from Weblate",
"Update dependencies",
];
const template = `
LLM Task: Please help to put above PRs into the following sections based on their content. If a PR fits multiple sections, choose the most relevant one. If a PR doesn't fit any section, place it in "Others". If there are grammatical errors in the PR titles, please correct them. Don't change the PR numbers and authors, and keep the format. Output as markdown file format.
LLM Task: Please help to put above PRs into the following sections based on their content. If a PR fits multiple sections, choose the most relevant one. If a PR doesn't fit any section, place it in "Others". If there are grammatical errors in the PR titles, please correct them. Don't change the PR numbers and authors, and keep the format. Output as markdown.
Changelog:
@ -28,9 +37,7 @@ Changelog:
- Other small changes, code refactoring and comment/doc updates in this repo:
`;
if (import.meta.main) {
await main();
}
await main();
/**
* Main Function
@ -45,63 +52,60 @@ async function main() {
}
console.log(`Generating changelog since version ${previousVersion}...`);
console.log(await generateChangelog(previousVersion));
}
/**
* Generate Changelog
* @param {string} previousVersion Previous Version Tag
* @returns {Promise<string>} Changelog Content
*/
export async function generateChangelog(previousVersion) {
const prList = await getPullRequestList(previousVersion);
const list = [];
let content = "";
try {
const prList = await getPullRequestList(previousVersion);
const list = [];
let i = 1;
for (const pr of prList) {
console.log(`Progress: ${i++}/${prList.length}`);
let authorSet = await getAuthorList(pr.number);
authorSet = await mainAuthorToFront(pr.author.login, authorSet);
let i = 1;
for (const pr of prList) {
console.log(`Progress: ${i++}/${prList.length}`);
let authorSet = await getAuthorList(pr.number);
authorSet = await mainAuthorToFront(pr.author.login, authorSet);
if (mergeList.includes(pr.title)) {
// Check if it is already in the list
const existingItem = list.find((item) => item.title === pr.title);
if (existingItem) {
existingItem.numbers.push(pr.number);
for (const author of authorSet) {
existingItem.authors.add(author);
// Sort the authors
existingItem.authors = new Set([...existingItem.authors].sort((a, b) => a.localeCompare(b)));
if (mergeList.includes(pr.title)) {
// Check if it is already in the list
const existingItem = list.find(item => item.title === pr.title);
if (existingItem) {
existingItem.numbers.push(pr.number);
for (const author of authorSet) {
existingItem.authors.add(author);
// Sort the authors
existingItem.authors = new Set([ ...existingItem.authors ].sort((a, b) => a.localeCompare(b)));
}
continue;
}
continue;
}
const item = {
numbers: [ pr.number ],
title: pr.title,
authors: authorSet,
};
list.push(item);
}
const item = {
numbers: [pr.number],
title: pr.title,
authors: authorSet,
};
for (const item of list) {
// Concat pr numbers into a string like #123 #456
const prPart = item.numbers.map(num => `#${num}`).join(" ");
list.push(item);
}
// Concat authors into a string like @user1 @user2
let authorPart = [ ...item.authors ].map(author => `@${author}`).join(" ");
for (const item of list) {
// Concat pr numbers into a string like #123 #456
const prPart = item.numbers.map((num) => `#${num}`).join(" ");
if (authorPart) {
authorPart = `(Thanks ${authorPart})`;
}
// Concat authors into a string like @user1 @user2
let authorPart = [...item.authors].map((author) => `@${author}`).join(" ");
if (authorPart) {
authorPart = `(Thanks ${authorPart})`;
console.log(`- ${prPart} ${item.title} ${authorPart}`);
}
content += `- ${prPart} ${item.title} ${authorPart}\n`;
}
console.log(template);
return content + "\n" + template;
} catch (e) {
console.error("Failed to get pull request list:", e);
process.exit(1);
}
}
/**
@ -110,37 +114,28 @@ export async function generateChangelog(previousVersion) {
*/
async function getPullRequestList(previousVersion) {
// Get the date of previousVersion in YYYY-MM-DD format from git
const previousVersionDate = childProcess
.execSync(`git log -1 --format=%cd --date=short ${previousVersion}`)
.toString()
.trim();
const previousVersionDate = childProcess.execSync(`git log -1 --format=%cd --date=short ${previousVersion}`).toString().trim();
if (!previousVersionDate) {
throw new Error(
`Unable to find the date of version ${previousVersion}. Please make sure the version tag exists.`
);
throw new Error(`Unable to find the date of version ${previousVersion}. Please make sure the version tag exists.`);
}
const ghProcess = childProcess.spawnSync(
"gh",
[
"pr",
"list",
"--state",
"merged",
"--base",
"master",
"--search",
`merged:>=${previousVersionDate}`,
"--json",
"number,title,author",
"--limit",
"1000",
],
{
encoding: "utf-8",
}
);
const ghProcess = childProcess.spawnSync("gh", [
"pr",
"list",
"--state",
"merged",
"--base",
"master",
"--search",
`merged:>=${previousVersionDate}`,
"--json",
"number,title,author",
"--limit",
"1000"
], {
encoding: "utf-8"
});
if (ghProcess.error) {
throw ghProcess.error;
@ -158,8 +153,14 @@ async function getPullRequestList(previousVersion) {
* @returns {Promise<Set<string>>} Set of Authors' GitHub Usernames
*/
async function getAuthorList(prID) {
const ghProcess = childProcess.spawnSync("gh", ["pr", "view", prID, "--json", "commits"], {
encoding: "utf-8",
const ghProcess = childProcess.spawnSync("gh", [
"pr",
"view",
prID,
"--json",
"commits"
], {
encoding: "utf-8"
});
if (ghProcess.error) {
@ -184,7 +185,7 @@ async function getAuthorList(prID) {
}
// Sort the set
return new Set([...set].sort((a, b) => a.localeCompare(b)));
return new Set([ ...set ].sort((a, b) => a.localeCompare(b)));
}
/**
@ -196,5 +197,5 @@ async function mainAuthorToFront(mainAuthor, authorSet) {
if (ignoreList.includes(mainAuthor)) {
return authorSet;
}
return new Set([mainAuthor, ...authorSet]);
return new Set([ mainAuthor, ...authorSet ]);
}

View File

@ -1,6 +1,6 @@
// Supports: Deno, Bun, Node.js >= 18 (ts-node)
const pushURL: string = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval: number = 60;
const pushURL : string = "https://example.com/api/push/key?status=up&msg=OK&ping=";
const interval : number = 60;
const push = async () => {
await fetch(pushURL);

View File

@ -8,7 +8,7 @@ async function main() {
const branch = process.argv[2];
// Use gh to get current branch's pr id
let currentBranchPRID = execSync('gh pr view --json number --jq ".number"').toString().trim();
let currentBranchPRID = execSync("gh pr view --json number --jq \".number\"").toString().trim();
console.log("Pr ID: ", currentBranchPRID);
// Use gh commend to get pr commits

View File

@ -7,28 +7,24 @@ import {
checkTagExists,
checkVersionFormat,
getRepoNames,
execSync,
checkReleaseBranch,
createDistTarGz,
createReleasePR,
pressAnyKey,
execSync, uploadArtifacts, checkReleaseBranch,
} from "./lib.mjs";
import semver from "semver";
const repoNames = getRepoNames();
const version = process.env.RELEASE_BETA_VERSION;
const dryRun = process.env.DRY_RUN === "true";
const previousVersion = process.env.RELEASE_PREVIOUS_VERSION;
const branchName = `release-${version}`;
const githubRunId = process.env.GITHUB_RUN_ID;
if (dryRun) {
console.log("Dry run mode enabled. No images will be pushed.");
}
const githubToken = process.env.RELEASE_GITHUB_TOKEN;
console.log("RELEASE_BETA_VERSION:", version);
// Check if the current branch is "release-{version}"
checkReleaseBranch(branchName);
if (!githubToken) {
console.error("GITHUB_TOKEN is required");
process.exit(1);
}
// Check if the current branch is "release"
checkReleaseBranch();
// Check if the version is a valid semver
checkVersionFormat(version);
@ -48,34 +44,24 @@ checkDocker();
await checkTagExists(repoNames, version);
// node extra/beta/update-version.js
await import("../beta/update-version.mjs");
// Create Pull Request (gh pr create will handle pushing the branch)
await createReleasePR(version, previousVersion, dryRun, branchName, githubRunId);
execSync("node ./extra/beta/update-version.js");
// Build frontend dist
buildDist();
if (!dryRun) {
// Build slim image (rootless)
buildImage(
repoNames,
["beta-slim-rootless", ver(version, "slim-rootless")],
"rootless",
"BASE_IMAGE=louislam/uptime-kuma:base2-slim"
);
// Build slim image (rootless)
buildImage(repoNames, [ "beta-slim-rootless", ver(version, "slim-rootless") ], "rootless", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");
// Build full image (rootless)
buildImage(repoNames, ["beta-rootless", ver(version, "rootless")], "rootless");
// Build full image (rootless)
buildImage(repoNames, [ "beta-rootless", ver(version, "rootless") ], "rootless");
// Build slim image
buildImage(repoNames, ["beta-slim", ver(version, "slim")], "release", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");
// Build slim image
buildImage(repoNames, [ "beta-slim", ver(version, "slim") ], "release", "BASE_IMAGE=louislam/uptime-kuma:base2-slim");
// Build full image
buildImage(repoNames, ["beta", version], "release");
} else {
console.log("Dry run mode - skipping image build and push.");
}
// Build full image
buildImage(repoNames, [ "beta", version ], "release");
// Create dist.tar.gz
await createDistTarGz();
await pressAnyKey();
// npm run upload-artifacts
uploadArtifacts(version, githubToken);

View File

@ -1,9 +1,6 @@
import "dotenv/config";
import * as childProcess from "child_process";
import semver from "semver";
import { generateChangelog } from "../generate-changelog.mjs";
import fs from "fs";
import tar from "tar";
export const dryRun = process.env.RELEASE_DRY_RUN === "1";
@ -26,14 +23,16 @@ export function checkDocker() {
/**
* Get Docker Hub repository name
* @returns {string[]} List of repository names
*/
export function getRepoNames() {
if (process.env.RELEASE_REPO_NAMES) {
// Split by comma
return process.env.RELEASE_REPO_NAMES.split(",").map((name) => name.trim());
}
return ["louislam/uptime-kuma", "ghcr.io/louislam/uptime-kuma"];
return [
"louislam/uptime-kuma",
"ghcr.io/louislam/uptime-kuma",
];
}
/**
@ -58,15 +57,15 @@ export function buildDist() {
* @param {string} platform Build platform
* @returns {void}
*/
export function buildImage(
repoNames,
tags,
target,
buildArgs = "",
dockerfile = "docker/dockerfile",
platform = "linux/amd64,linux/arm64,linux/arm/v7"
) {
let args = ["buildx", "build", "-f", dockerfile, "--platform", platform];
export function buildImage(repoNames, tags, target, buildArgs = "", dockerfile = "docker/dockerfile", platform = "linux/amd64,linux/arm64,linux/arm/v7") {
let args = [
"buildx",
"build",
"-f",
dockerfile,
"--platform",
platform,
];
for (let repoName of repoNames) {
// Add tags
@ -75,14 +74,22 @@ export function buildImage(
}
}
args = [...args, "--target", target];
args = [
...args,
"--target",
target,
];
// Add build args
if (buildArgs) {
args.push("--build-arg", buildArgs);
}
args = [...args, ".", "--push"];
args = [
...args,
".",
"--push",
];
if (!dryRun) {
childProcess.spawnSync("docker", args, { stdio: "inherit" });
@ -165,13 +172,11 @@ export function pressAnyKey() {
console.log("Git Push and Publish the release note on github, then press any key to continue");
process.stdin.setRawMode(true);
process.stdin.resume();
return new Promise((resolve) =>
process.stdin.once("data", (data) => {
process.stdin.setRawMode(false);
process.stdin.pause();
resolve();
})
);
return new Promise(resolve => process.stdin.once("data", data => {
process.stdin.setRawMode(false);
process.stdin.pause();
resolve();
}));
}
/**
@ -184,9 +189,9 @@ export function ver(version, identifier) {
const obj = semver.parse(version);
if (obj.prerelease.length === 0) {
obj.prerelease = [identifier];
obj.prerelease = [ identifier ];
} else {
obj.prerelease[0] = [obj.prerelease[0], identifier].join("-");
obj.prerelease[0] = [ obj.prerelease[0], identifier ].join("-");
}
return obj.format();
}
@ -197,7 +202,6 @@ export function ver(version, identifier) {
* @param {string} version Version
* @param {string} githubToken GitHub token
* @returns {void}
* @deprecated
*/
export function uploadArtifacts(version, githubToken) {
let args = [
@ -247,117 +251,14 @@ export function execSync(cmd) {
}
/**
* Check if the current branch matches the expected release branch pattern
* @param {string} expectedBranch Expected branch name (can be "release" or "release-{version}")
* Check if the current branch is "release"
* @returns {void}
*/
export function checkReleaseBranch(expectedBranch = "release") {
const res = childProcess.spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"]);
export function checkReleaseBranch() {
const res = childProcess.spawnSync("git", [ "rev-parse", "--abbrev-ref", "HEAD" ]);
const branch = res.stdout.toString().trim();
if (branch !== expectedBranch) {
console.error(`Current branch is ${branch}, please switch to "${expectedBranch}" branch`);
if (branch !== "release") {
console.error(`Current branch is ${branch}, please switch to "release" branch`);
process.exit(1);
}
}
/**
* Create dist.tar.gz from the dist directory
* Similar to "tar -zcvf dist.tar.gz dist", but using nodejs
* @returns {Promise<void>}
*/
export async function createDistTarGz() {
const distPath = "dist";
const outputPath = "./tmp/dist.tar.gz";
const tmpDir = "./tmp";
// Ensure tmp directory exists
if (!fs.existsSync(tmpDir)) {
fs.mkdirSync(tmpDir, { recursive: true });
}
// Check if dist directory exists
if (!fs.existsSync(distPath)) {
console.error("Error: dist directory not found");
process.exit(1);
}
console.log(`Creating ${outputPath} from ${distPath}...`);
try {
await tar.create(
{
gzip: true,
file: outputPath,
},
[distPath]
);
console.log(`Successfully created ${outputPath}`);
} catch (error) {
console.error(`Failed to create tarball: ${error.message}`);
process.exit(1);
}
}
/**
* Create a draft release PR
* @param {string} version Version
* @param {string} previousVersion Previous version tag
* @param {boolean} dryRun Still create the PR, but add "[DRY RUN]" to the title
* @param {string} branchName The branch name to use for the PR head (defaults to "release")
* @param {string} githubRunId The GitHub Actions run ID for linking to artifacts
* @returns {Promise<void>}
*/
export async function createReleasePR(version, previousVersion, dryRun, branchName = "release", githubRunId = null) {
const changelog = await generateChangelog(previousVersion);
const title = dryRun ? `chore: update to ${version} (dry run)` : `chore: update to ${version}`;
// Build the artifact link - use direct run link if available, otherwise link to workflow file
const artifactLink = githubRunId
? `https://github.com/louislam/uptime-kuma/actions/runs/${githubRunId}/workflow`
: `https://github.com/louislam/uptime-kuma/actions/workflows/beta-release.yml`;
const body = `## Release ${version}
This PR prepares the release for version ${version}.
### Manual Steps Required
- [ ] Merge this PR (squash and merge)
- [ ] Create a new release on GitHub with the tag \`${version}\`.
- [ ] Ask any LLM to categorize the changelog into sections.
- [ ] Place the changelog in the release note.
- [ ] Download the \`dist.tar.gz\` artifact from the [workflow run](${artifactLink}) and upload it to the release.
- [ ] (Beta only) Set prerelease
- [ ] Publish the release note on GitHub.
### Changelog
\`\`\`md
${changelog}
\`\`\`
### Release Artifacts
The \`dist.tar.gz\` archive will be available as an artifact in the workflow run.
`;
// Create the PR using gh CLI
const args = ["pr", "create", "--title", title, "--body", body, "--base", "master", "--head", branchName, "--draft"];
console.log(`Creating draft PR: ${title}`);
const result = childProcess.spawnSync("gh", args, {
encoding: "utf-8",
stdio: "inherit",
env: {
...process.env,
GH_TOKEN: process.env.GH_TOKEN || process.env.GITHUB_TOKEN,
},
});
if (result.status !== 0) {
console.error("Failed to create pull request");
process.exit(1);
}
console.log("Successfully created draft pull request");
}

View File

@ -8,7 +8,7 @@ const TwoFA = require("../server/2fa");
const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
output: process.stdout
});
const main = async () => {
@ -19,7 +19,7 @@ const main = async () => {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (!user) {
if (! user) {
throw new Error("user not found, have you installed?");
}
@ -31,6 +31,7 @@ const main = async () => {
await TwoFA.disable2FA(user.id);
console.log("2FA has been removed successfully.");
}
}
} catch (e) {
console.error("Error: " + e.message);

View File

@ -21,3 +21,4 @@ const main = async () => {
};
main();

View File

@ -12,7 +12,7 @@ const args = require("args-parser")(process.argv);
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
output: process.stdout
});
const main = async () => {
@ -28,7 +28,7 @@ const main = async () => {
// No need to actually reset the password for testing, just make sure no connection problem. It is ok for now.
if (!process.env.TEST_BACKEND) {
const user = await R.findOne("user");
if (!user) {
if (! user) {
throw new Error("user not found, have you installed?");
}
@ -41,10 +41,7 @@ const main = async () => {
// When called with "--new-password" argument for unattended modification (e.g. npm run reset-password -- --new_password=secret)
if ("new-password" in args) {
console.log("Using password from argument");
console.warn(
"\x1b[31m%s\x1b[0m",
"Warning: the password might be stored, in plain text, in your shell's history"
);
console.warn("\x1b[31m%s\x1b[0m", "Warning: the password might be stored, in plain text, in your shell's history");
password = confirmPassword = args["new-password"] + "";
if (passwordStrength(password).value === "Too weak") {
throw new Error("Password is too weak, please use a stronger password.");
@ -74,6 +71,7 @@ const main = async () => {
}
}
console.log("Password reset successfully.");
}
} catch (e) {
console.error("Error: " + e.message);
@ -114,23 +112,19 @@ function disconnectAllSocketClients(username, password) {
timeout: 5000,
});
socket.on("connect", () => {
socket.emit(
"login",
{
username,
password,
},
(res) => {
if (res.ok) {
console.log("Logged in.");
socket.emit("disconnectOtherSocketClients");
} else {
console.warn("Login failed.");
console.warn("Please restart the server to disconnect all sessions.");
}
socket.close();
socket.emit("login", {
username,
password,
}, (res) => {
if (res.ok) {
console.log("Logged in.");
socket.emit("disconnectOtherSocketClients");
} else {
console.warn("Login failed.");
console.warn("Please restart the server to disconnect all sessions.");
}
);
socket.close();
});
});
socket.on("connect_error", function () {

View File

@ -7,7 +7,7 @@ const dns2 = require("dns2");
const { Packet } = dns2;
const server = dns2.createServer({
udp: true,
udp: true
});
server.on("request", (request, send, rinfo) => {
@ -17,13 +17,14 @@ server.on("request", (request, send, rinfo) => {
const response = Packet.createResponseFromRequest(request);
if (question.name === "existing.com") {
if (question.type === Packet.TYPE.A) {
response.answers.push({
name: question.name,
type: question.type,
class: question.class,
ttl: 300,
address: "1.2.3.4",
address: "1.2.3.4"
});
} else if (question.type === Packet.TYPE.AAAA) {
response.answers.push({
@ -48,7 +49,7 @@ server.on("request", (request, send, rinfo) => {
class: question.class,
ttl: 300,
exchange: "mx1.existing.com",
priority: 5,
priority: 5
});
} else if (question.type === Packet.TYPE.NS) {
response.answers.push({
@ -102,6 +103,7 @@ server.on("request", (request, send, rinfo) => {
value: "ca.existing.com",
});
}
}
if (question.name === "4.3.2.1.in-addr.arpa") {
@ -130,7 +132,7 @@ server.on("close", () => {
});
server.listen({
udp: 5300,
udp: 5300
});
/**

View File

@ -41,19 +41,17 @@ server1.aedes.on("subscribe", (subscriptions, client) => {
for (let s of subscriptions) {
if (s.topic === "test") {
server1.aedes.publish(
{
topic: "test",
payload: Buffer.from("ok"),
},
(error) => {
if (error) {
log.error("mqtt_server", error);
}
server1.aedes.publish({
topic: "test",
payload: Buffer.from("ok"),
}, (error) => {
if (error) {
log.error("mqtt_server", error);
}
);
});
}
}
});
server1.start();

View File

@ -10,7 +10,7 @@ let lines = file.split("\n");
lines = lines.filter((line) => line !== "");
// Remove duplicates
lines = [...new Set(lines)];
lines = [ ...new Set(lines) ];
// Remove @weblate and @UptimeKumaBot
lines = lines.filter((line) => line !== "@weblate" && line !== "@UptimeKumaBot" && line !== "@louislam");

View File

@ -54,13 +54,13 @@ async function updateLanguage(langCode, baseLangCode) {
} else {
console.log("Empty file");
obj = {
languageName: "<Your Language name in your language (not in English)>",
languageName: "<Your Language name in your language (not in English)>"
};
}
// En first
for (const key in en) {
if (!obj[key]) {
if (! obj[key]) {
obj[key] = en[key];
}
}
@ -68,17 +68,15 @@ async function updateLanguage(langCode, baseLangCode) {
if (baseLang !== en) {
// Base second
for (const key in baseLang) {
if (!obj[key]) {
if (! obj[key]) {
obj[key] = key;
}
}
}
const code =
"export default " +
util.inspect(obj, {
depth: null,
});
const code = "export default " + util.inspect(obj, {
depth: null,
});
fs.writeFileSync(`../../src/languages/${file}`, code);
}

View File

@ -9,14 +9,15 @@ const newVersion = process.env.RELEASE_VERSION;
console.log("New Version: " + newVersion);
if (!newVersion) {
if (! newVersion) {
console.error("invalid version");
process.exit(1);
}
const exists = tagExists(newVersion);
if (!exists) {
if (! exists) {
// Process package.json
pkg.version = newVersion;
@ -26,19 +27,20 @@ if (!exists) {
// Also update package-lock.json
const npm = /^win/.test(process.platform) ? "npm.cmd" : "npm";
const resultVersion = childProcess.spawnSync(npm, ["--no-git-tag-version", "version", newVersion], { shell: true });
const resultVersion = childProcess.spawnSync(npm, [ "--no-git-tag-version", "version", newVersion ], { shell: true });
if (resultVersion.error) {
console.error(resultVersion.error);
console.error("error npm version!");
process.exit(1);
}
const resultInstall = childProcess.spawnSync(npm, ["install"], { shell: true });
const resultInstall = childProcess.spawnSync(npm, [ "install" ], { shell: true });
if (resultInstall.error) {
console.error(resultInstall.error);
console.error("error update package-lock!");
process.exit(1);
}
commit(newVersion);
} else {
console.log("version exists");
}
@ -52,7 +54,7 @@ if (!exists) {
function commit(version) {
let msg = "Update to " + version;
let res = childProcess.spawnSync("git", ["commit", "-m", msg, "-a"]);
let res = childProcess.spawnSync("git", [ "commit", "-m", msg, "-a" ]);
let stdout = res.stdout.toString().trim();
console.log(stdout);
@ -68,11 +70,11 @@ function commit(version) {
* @throws Version is not valid
*/
function tagExists(version) {
if (!version) {
if (! version) {
throw new Error("invalid version");
}
let res = childProcess.spawnSync("git", ["tag", "-l", version]);
let res = childProcess.spawnSync("git", [ "tag", "-l", version ]);
return res.stdout.toString().trim() === version;
}

View File

@ -21,23 +21,23 @@ function updateWiki(newVersion) {
safeDelete(wikiDir);
childProcess.spawnSync("git", ["clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir]);
childProcess.spawnSync("git", [ "clone", "https://github.com/louislam/uptime-kuma.wiki.git", wikiDir ]);
let content = fs.readFileSync(howToUpdateFilename).toString();
// Replace the version: https://regex101.com/r/hmj2Bc/1
content = content.replace(/(git checkout )([^\s]+)/, `$1${newVersion}`);
fs.writeFileSync(howToUpdateFilename, content);
childProcess.spawnSync("git", ["add", "-A"], {
childProcess.spawnSync("git", [ "add", "-A" ], {
cwd: wikiDir,
});
childProcess.spawnSync("git", ["commit", "-m", `Update to ${newVersion}`], {
childProcess.spawnSync("git", [ "commit", "-m", `Update to ${newVersion}` ], {
cwd: wikiDir,
});
console.log("Pushing to Github");
childProcess.spawnSync("git", ["push"], {
childProcess.spawnSync("git", [ "push" ], {
cwd: wikiDir,
});

View File

@ -9,16 +9,16 @@ if (!platform) {
const supportedPlatforms = [
{
name: "linux/amd64",
bin: "./build/uptime-kuma-push-amd64",
bin: "./build/uptime-kuma-push-amd64"
},
{
name: "linux/arm64",
bin: "./build/uptime-kuma-push-arm64",
bin: "./build/uptime-kuma-push-arm64"
},
{
name: "linux/arm/v7",
bin: "./build/uptime-kuma-push-armv7",
},
bin: "./build/uptime-kuma-push-armv7"
}
];
let platformObj = null;
@ -45,3 +45,4 @@ if (platformObj) {
console.error("Unsupported platform: " + platform);
process.exit(1);
}

2829
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
{
"name": "uptime-kuma",
"version": "2.1.0-beta.2",
"version": "2.1.0-beta.1",
"license": "MIT",
"repository": {
"type": "git",
@ -16,7 +16,6 @@
"lint:style": "stylelint \"**/*.{vue,css,scss}\" --ignore-path .gitignore",
"lint-fix:style": "stylelint \"**/*.{vue,css,scss}\" --fix --ignore-path .gitignore",
"lint": "npm run lint:js && npm run lint:style",
"fmt": "prettier --write \"**/*.{js,ts,vue,css,scss,json,md,yml,yaml}\"",
"lint:prod": "npm run lint:js-prod && npm run lint:style",
"dev": "concurrently -k -r \"wait-on tcp:3000 && npm run start-server-dev \" \"npm run start-frontend-dev\"",
"start-frontend-dev": "cross-env NODE_ENV=development vite --host --config ./config/vite.config.js",
@ -29,8 +28,8 @@
"test": "npm run test-backend && npm run test-e2e",
"test-with-build": "npm run build && npm test",
"test-backend": "node test/test-backend.mjs",
"test-backend-22": "cross-env TEST_BACKEND=1 node --test --test-reporter=spec \"test/backend-test/**/*.js\"",
"test-backend-20": "cross-env TEST_BACKEND=1 node --test --test-reporter=spec test/backend-test",
"test-backend-22": "cross-env TEST_BACKEND=1 node --test \"test/backend-test/**/*.js\"",
"test-backend-20": "cross-env TEST_BACKEND=1 node --test test/backend-test",
"test-e2e": "playwright test --config ./config/playwright.config.js",
"test-e2e-ui": "playwright test --config ./config/playwright.config.js --ui --ui-port=51063",
"playwright-codegen": "playwright codegen localhost:3000 --save-storage=./private/e2e-auth.json",
@ -122,9 +121,10 @@
"nanoid": "~3.3.4",
"net-snmp": "^3.11.2",
"node-cloudflared-tunnel": "~1.0.9",
"node-fetch-cache": "^5.1.0",
"node-radius-utils": "~1.2.0",
"nodemailer": "~7.0.12",
"nostr-tools": "^2.17.0",
"nostr-tools": "^2.10.4",
"notp": "~2.0.3",
"openid-client": "^5.4.2",
"password-hash": "~1.2.2",
@ -158,7 +158,7 @@
"@fortawesome/fontawesome-svg-core": "~1.2.36",
"@fortawesome/free-regular-svg-icons": "~5.15.4",
"@fortawesome/free-solid-svg-icons": "~5.15.4",
"@fortawesome/vue-fontawesome": "~3.1.3",
"@fortawesome/vue-fontawesome": "~3.0.0-5",
"@playwright/test": "~1.39.0",
"@popperjs/core": "~2.10.2",
"@testcontainers/hivemq": "^10.13.1",
@ -187,36 +187,33 @@
"dns2": "~2.0.1",
"dompurify": "~3.2.4",
"eslint": "~8.14.0",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-jsdoc": "~46.4.6",
"eslint-plugin-vue": "~8.7.1",
"favico.js": "~0.3.10",
"get-port-please": "^3.1.1",
"node-ssh": "~13.1.0",
"postcss-html": "~1.8.1",
"postcss-rtlcss": "~5.7.1",
"postcss-html": "~1.5.0",
"postcss-rtlcss": "~3.7.2",
"postcss-scss": "~4.0.4",
"prettier": "^3.7.4",
"prismjs": "~1.30.0",
"qrcode": "~1.5.0",
"rollup-plugin-visualizer": "^5.6.0",
"sass": "~1.42.1",
"stylelint": "^15.10.1",
"stylelint-config-prettier": "^9.0.5",
"stylelint-config-standard": "~25.0.0",
"terser": "~5.15.0",
"test": "~3.3.0",
"testcontainers": "^11.5.0",
"testcontainers": "^10.13.1",
"typescript": "~4.4.4",
"v-pagination-3": "~0.1.7",
"vite": "~5.4.15",
"vite-plugin-compression": "^0.5.1",
"vite-plugin-pwa": "^1.1.0",
"vue": "~3.5.26",
"vue": "~3.4.2",
"vue-chartjs": "~5.2.0",
"vue-confirm-dialog": "~1.0.2",
"vue-contenteditable": "~3.0.4",
"vue-i18n": "~11.2.8",
"vue-i18n": "~9.14.3",
"vue-image-crop-upload": "~3.0.3",
"vue-multiselect": "~3.0.0-alpha.2",
"vue-prism-editor": "~2.0.0-alpha.2",

View File

@ -1,14 +1,18 @@
const { R } = require("redbean-node");
class TwoFA {
/**
* Disable 2FA for specified user
* @param {number} userID ID of user to disable
* @returns {Promise<void>}
*/
static async disable2FA(userID) {
return await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [userID]);
return await R.exec("UPDATE `user` SET twofa_status = 0 WHERE id = ? ", [
userID,
]);
}
}
module.exports = TwoFA;

View File

@ -16,10 +16,7 @@ function getAnalyticsScript(statusPage) {
case "umami":
return umamiAnalytics.getUmamiAnalyticsScript(statusPage.analyticsScriptUrl, statusPage.analyticsId);
case "plausible":
return plausibleAnalytics.getPlausibleAnalyticsScript(
statusPage.analyticsScriptUrl,
statusPage.analyticsId
);
return plausibleAnalytics.getPlausibleAnalyticsScript(statusPage.analyticsScriptUrl, statusPage.analyticsId);
case "matomo":
return matomoAnalytics.getMatomoAnalyticsScript(statusPage.analyticsScriptUrl, statusPage.analyticsId);
default:
@ -47,5 +44,5 @@ function isValidAnalyticsConfig(statusPage) {
module.exports = {
getAnalyticsScript,
isValidAnalyticsConfig,
isValidAnalyticsConfig
};

View File

@ -5,7 +5,7 @@ const { escape } = require("html-escaper");
* Returns a string that represents the javascript that is required to insert the Plausible Analytics script
* into a webpage.
* @param {string} scriptUrl the Plausible Analytics script url.
* @param {string} domainsToMonitor Domains to track separated by a ',' to add Plausible Analytics script.
* @param {string} domainsToMonitor Domains to track seperated by a ',' to add Plausible Analytics script.
* @returns {string} HTML script tags to inject into page
*/
function getPlausibleAnalyticsScript(scriptUrl, domainsToMonitor) {
@ -32,5 +32,5 @@ function getPlausibleAnalyticsScript(scriptUrl, domainsToMonitor) {
}
module.exports = {
getPlausibleAnalyticsScript,
getPlausibleAnalyticsScript
};

View File

@ -18,7 +18,9 @@ exports.login = async function (username, password) {
return null;
}
let user = await R.findOne("user", "TRIM(username) = ? AND active = 1 ", [username.trim()]);
let user = await R.findOne("user", "TRIM(username) = ? AND active = 1 ", [
username.trim(),
]);
if (user && passwordHash.verify(password, user.password)) {
// Upgrade the hash to bcrypt
@ -48,7 +50,7 @@ async function verifyAPIKey(key) {
let index = key.substring(2, key.indexOf("_"));
let clear = key.substring(key.indexOf("_") + 1, key.length);
let hash = await R.findOne("api_key", " id=? ", [index]);
let hash = await R.findOne("api_key", " id=? ", [ index ]);
if (hash === null) {
return false;
@ -154,7 +156,7 @@ exports.basicAuth = async function (req, res, next) {
* @returns {Promise<void>}
*/
exports.apiAuth = async function (req, res, next) {
if (!(await Settings.get("disableAuth"))) {
if (!await Settings.get("disableAuth")) {
let usingAPIKeys = await Settings.get("apiKeysEnabled");
let middleware;
if (usingAPIKeys) {

View File

@ -14,7 +14,7 @@ let interval;
exports.startInterval = () => {
let check = async () => {
if ((await setting("checkUpdate")) === false) {
if (await setting("checkUpdate") === false) {
return;
}
@ -40,9 +40,11 @@ exports.startInterval = () => {
if (res.data.slow) {
exports.latestVersion = res.data.slow;
}
} catch (_) {
log.info("update-checker", "Failed to check for new versions");
}
};
check();

View File

@ -19,12 +19,14 @@ async function sendNotificationList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("notification", " user_id = ? ", [socket.userID]);
let list = await R.find("notification", " user_id = ? ", [
socket.userID,
]);
for (let bean of list) {
let notificationObject = bean.export();
notificationObject.isDefault = notificationObject.isDefault === 1;
notificationObject.active = notificationObject.active === 1;
notificationObject.isDefault = (notificationObject.isDefault === 1);
notificationObject.active = (notificationObject.active === 1);
result.push(notificationObject);
}
@ -44,15 +46,14 @@ async function sendNotificationList(socket) {
* @returns {Promise<void>}
*/
async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
let list = await R.getAll(
`
let list = await R.getAll(`
SELECT * FROM heartbeat
WHERE monitor_id = ?
ORDER BY time DESC
LIMIT 100
`,
[monitorID]
);
`, [
monitorID,
]);
let result = list.reverse();
@ -74,26 +75,23 @@ async function sendHeartbeatList(socket, monitorID, toUser = false, overwrite =
async function sendImportantHeartbeatList(socket, monitorID, toUser = false, overwrite = false) {
const timeLogger = new TimeLogger();
let list = await R.find(
"heartbeat",
`
let list = await R.find("heartbeat", `
monitor_id = ?
AND important = 1
ORDER BY time DESC
LIMIT 500
`,
[monitorID]
);
`, [
monitorID,
]);
timeLogger.print(`[Monitor: ${monitorID}] sendImportantHeartbeatList`);
const result = list.map((bean) => bean.toJSON());
if (toUser) {
io.to(socket.userID).emit("importantHeartbeatList", monitorID, result, overwrite);
io.to(socket.userID).emit("importantHeartbeatList", monitorID, list, overwrite);
} else {
socket.emit("importantHeartbeatList", monitorID, result, overwrite);
socket.emit("importantHeartbeatList", monitorID, list, overwrite);
}
}
/**
@ -104,11 +102,8 @@ async function sendImportantHeartbeatList(socket, monitorID, toUser = false, ove
async function sendProxyList(socket) {
const timeLogger = new TimeLogger();
const list = await R.find("proxy", " user_id = ? ", [socket.userID]);
io.to(socket.userID).emit(
"proxyList",
list.map((bean) => bean.export())
);
const list = await R.find("proxy", " user_id = ? ", [ socket.userID ]);
io.to(socket.userID).emit("proxyList", list.map(bean => bean.export()));
timeLogger.print("Send Proxy List");
@ -124,7 +119,11 @@ async function sendAPIKeyList(socket) {
const timeLogger = new TimeLogger();
let result = [];
const list = await R.find("api_key", "user_id=?", [socket.userID]);
const list = await R.find(
"api_key",
"user_id=?",
[ socket.userID ],
);
for (let bean of list) {
result.push(bean.toPublicJSON());
@ -151,7 +150,7 @@ async function sendInfo(socket, hideVersion = false) {
if (!hideVersion) {
info.version = checkVersion.version;
info.latestVersion = checkVersion.latestVersion;
info.isContainer = process.env.UPTIME_KUMA_IS_CONTAINER === "1";
info.isContainer = (process.env.UPTIME_KUMA_IS_CONTAINER === "1");
info.dbType = Database.dbConfig.type;
info.runtime = {
platform: process.platform, // linux or win32
@ -171,7 +170,9 @@ async function sendDockerHostList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("docker_host", " user_id = ? ", [socket.userID]);
let list = await R.find("docker_host", " user_id = ? ", [
socket.userID,
]);
for (let bean of list) {
result.push(bean.toJSON());
@ -193,7 +194,9 @@ async function sendRemoteBrowserList(socket) {
const timeLogger = new TimeLogger();
let result = [];
let list = await R.find("remote_browser", " user_id = ? ", [socket.userID]);
let list = await R.find("remote_browser", " user_id = ? ", [
socket.userID,
]);
for (let bean of list) {
result.push(bean.toJSON());
@ -212,24 +215,21 @@ async function sendRemoteBrowserList(socket) {
* @returns {Promise<void>}
*/
async function sendMonitorTypeList(socket) {
const result = Object.entries(UptimeKumaServer.monitorTypeList).map(([key, type]) => {
return [
key,
{
supportsConditions: type.supportsConditions,
conditionVariables: type.conditionVariables.map((v) => {
return {
id: v.id,
operators: v.operators.map((o) => {
return {
id: o.id,
caption: o.caption,
};
}),
};
}),
},
];
const result = Object.entries(UptimeKumaServer.monitorTypeList).map(([ key, type ]) => {
return [ key, {
supportsConditions: type.supportsConditions,
conditionVariables: type.conditionVariables.map(v => {
return {
id: v.id,
operators: v.operators.map(o => {
return {
id: o.id,
caption: o.caption,
};
}),
};
}),
}];
});
io.to(socket.userID).emit("monitorTypeList", Object.fromEntries(result));

View File

@ -1,7 +1,7 @@
const isFreeBSD = /^freebsd/.test(process.platform);
// Interop with browser
const args = typeof process !== "undefined" ? require("args-parser")(process.argv) : {};
const args = (typeof process !== "undefined") ? require("args-parser")(process.argv) : {};
// If host is omitted, the server will accept connections on the unspecified IPv6 address (::) when IPv6 is available and the unspecified IPv4 address (0.0.0.0) otherwise.
// Dual-stack support for (::)
@ -9,17 +9,13 @@ const args = typeof process !== "undefined" ? require("args-parser")(process.arg
let hostEnv = isFreeBSD ? null : process.env.HOST;
const hostname = args.host || process.env.UPTIME_KUMA_HOST || hostEnv;
const port = [args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001]
.map((portValue) => parseInt(portValue))
.find((portValue) => !isNaN(portValue));
const port = [ args.port, process.env.UPTIME_KUMA_PORT, process.env.PORT, 3001 ]
.map(portValue => parseInt(portValue))
.find(portValue => !isNaN(portValue));
const sslKey = args["ssl-key"] || process.env.UPTIME_KUMA_SSL_KEY || process.env.SSL_KEY || undefined;
const sslCert = args["ssl-cert"] || process.env.UPTIME_KUMA_SSL_CERT || process.env.SSL_CERT || undefined;
const sslKeyPassphrase =
args["ssl-key-passphrase"] ||
process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE ||
process.env.SSL_KEY_PASSPHRASE ||
undefined;
const sslKeyPassphrase = args["ssl-key-passphrase"] || process.env.UPTIME_KUMA_SSL_KEY_PASSPHRASE || process.env.SSL_KEY_PASSPHRASE || undefined;
const isSSL = sslKey && sslCert;

View File

@ -18,6 +18,7 @@ const SqlString = require("sqlstring");
* Database & App Data Folder
*/
class Database {
/**
* Bootstrap database for SQLite
* @type {string}
@ -88,7 +89,7 @@ class Database {
"patch-added-mqtt-monitor.sql": true,
"patch-add-clickable-status-page-link.sql": true,
"patch-add-sqlserver-monitor.sql": true,
"patch-add-other-auth.sql": { parents: ["patch-monitor-basic-auth.sql"] },
"patch-add-other-auth.sql": { parents: [ "patch-monitor-basic-auth.sql" ] },
"patch-grpc-monitor.sql": true,
"patch-add-radius-monitor.sql": true,
"patch-monitor-add-resend-interval.sql": true,
@ -137,24 +138,24 @@ class Database {
Database.dataDir = process.env.DATA_DIR || args["data-dir"] || "./data/";
Database.sqlitePath = path.join(Database.dataDir, "kuma.db");
if (!fs.existsSync(Database.dataDir)) {
if (! fs.existsSync(Database.dataDir)) {
fs.mkdirSync(Database.dataDir, { recursive: true });
}
Database.uploadDir = path.join(Database.dataDir, "upload/");
if (!fs.existsSync(Database.uploadDir)) {
if (! fs.existsSync(Database.uploadDir)) {
fs.mkdirSync(Database.uploadDir, { recursive: true });
}
// Create screenshot dir
Database.screenshotDir = path.join(Database.dataDir, "screenshots/");
if (!fs.existsSync(Database.screenshotDir)) {
if (! fs.existsSync(Database.screenshotDir)) {
fs.mkdirSync(Database.screenshotDir, { recursive: true });
}
Database.dockerTLSDir = path.join(Database.dataDir, "docker-tls/");
if (!fs.existsSync(Database.dockerTLSDir)) {
if (! fs.existsSync(Database.dockerTLSDir)) {
fs.mkdirSync(Database.dockerTLSDir, { recursive: true });
}
@ -165,7 +166,7 @@ class Database {
* Read the database config
* @throws {Error} If the config is invalid
* @typedef {string|undefined} envString
* @returns {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString, socketPath:envString}} Database config
* @returns {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} Database config
*/
static readDBConfig() {
let dbConfig;
@ -185,7 +186,7 @@ class Database {
/**
* @typedef {string|undefined} envString
* @param {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString, socketPath:envString}} dbConfig the database configuration that should be written
* @param {{type: "sqlite"} | {type:envString, hostname:envString, port:envString, database:envString, username:envString, password:envString}} dbConfig the database configuration that should be written
* @returns {void}
*/
static writeDBConfig(dbConfig) {
@ -227,22 +228,13 @@ class Database {
if (!process.env.UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS) {
parsedMaxPoolConnections = 10;
} else if (Number.isNaN(parsedMaxPoolConnections)) {
log.warn(
"db",
"Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was invalid."
);
log.warn("db", "Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was invalid.");
parsedMaxPoolConnections = 10;
} else if (parsedMaxPoolConnections < 1) {
log.warn(
"db",
"Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was less than 1."
);
log.warn("db", "Max database connections defaulted to 10 because UPTIME_KUMA_DB_POOL_MAX_CONNECTIONS was less than 1.");
parsedMaxPoolConnections = 10;
} else if (parsedMaxPoolConnections > 100) {
log.warn(
"db",
"Max database connections capped to 100 because Mysql/Mariadb connections are heavy. consider using a proxy like ProxySQL or MaxScale."
);
log.warn("db", "Max database connections capped to 100 because Mysql/Mariadb connections are heavy. consider using a proxy like ProxySQL or MaxScale.");
parsedMaxPoolConnections = 100;
}
@ -255,7 +247,8 @@ class Database {
log.info("db", `Database Type: ${dbConfig.type}`);
if (dbConfig.type === "sqlite") {
if (!fs.existsSync(Database.sqlitePath)) {
if (! fs.existsSync(Database.sqlitePath)) {
log.info("server", "Copying Database");
fs.copyFileSync(Database.templatePath, Database.sqlitePath);
}
@ -276,7 +269,7 @@ class Database {
idleTimeoutMillis: 120 * 1000,
propagateCreateError: false,
acquireTimeoutMillis: acquireConnectionTimeout,
},
}
};
} else if (dbConfig.type === "mariadb") {
const connection = await mysql.createConnection({
@ -284,15 +277,6 @@ class Database {
port: dbConfig.port,
user: dbConfig.username,
password: dbConfig.password,
socketPath: dbConfig.socketPath,
...(dbConfig.ssl
? {
ssl: {
rejectUnauthorized: true,
...(dbConfig.ca && dbConfig.ca.trim() !== "" ? { ca: [dbConfig.ca] } : {}),
},
}
: {}),
});
// Set to true, so for example "uptime.kuma", becomes `uptime.kuma`, not `uptime`.`kuma`
@ -310,7 +294,6 @@ class Database {
user: dbConfig.username,
password: dbConfig.password,
database: dbConfig.dbName,
socketPath: dbConfig.socketPath,
timezone: "Z",
typeCast: function (field, next) {
if (field.type === "DATETIME") {
@ -319,14 +302,6 @@ class Database {
}
return next();
},
...(dbConfig.ssl
? {
ssl: {
rejectUnauthorized: true,
...(dbConfig.ca && dbConfig.ca.trim() !== "" ? { ca: [dbConfig.ca] } : {}),
},
}
: {}),
},
pool: mariadbPoolConfig,
};
@ -412,7 +387,7 @@ class Database {
log.debug("db", "SQLite config:");
log.debug("db", await R.getAll("PRAGMA journal_mode"));
log.debug("db", await R.getAll("PRAGMA cache_size"));
log.debug("db", "SQLite Version: " + (await R.getCell("SELECT sqlite_version()")));
log.debug("db", "SQLite Version: " + await R.getCell("SELECT sqlite_version()"));
}
}
@ -464,6 +439,7 @@ class Database {
}
await this.migrateAggregateTable(port, hostname);
} catch (e) {
// Allow missing patch files for downgrade or testing pr.
if (e.message.includes("the following files are missing:")) {
@ -480,7 +456,9 @@ class Database {
* TODO
* @returns {Promise<void>}
*/
static async rollbackLatestPatch() {}
static async rollbackLatestPatch() {
}
/**
* Patch the database for SQLite
@ -490,7 +468,7 @@ class Database {
static async patchSqlite() {
let version = parseInt(await setting("database_version"));
if (!version) {
if (! version) {
version = 0;
}
@ -520,10 +498,7 @@ class Database {
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error(
"db",
"Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues"
);
log.error("db", "Please submit a bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
process.exit(1);
}
@ -544,7 +519,7 @@ class Database {
log.debug("db", "Database Patch 2.0 Process");
let databasePatchedFiles = await setting("databasePatchedFiles");
if (!databasePatchedFiles) {
if (! databasePatchedFiles) {
databasePatchedFiles = {};
}
@ -559,15 +534,13 @@ class Database {
if (this.patched) {
log.info("db", "Database Patched Successfully");
}
} catch (ex) {
await Database.close();
log.error("db", ex);
log.error("db", "Start Uptime-Kuma failed due to issue patching the database");
log.error(
"db",
"Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues"
);
log.error("db", "Please submit the bug report if you still encounter the problem after restart: https://github.com/louislam/uptime-kuma/issues");
process.exit(1);
}
@ -581,6 +554,7 @@ class Database {
* @returns {Promise<void>}
*/
static async migrateNewStatusPage() {
// Fix 1.13.0 empty slug bug
await R.exec("UPDATE status_page SET slug = 'empty-slug-recover' WHERE TRIM(slug) = ''");
@ -602,9 +576,9 @@ class Database {
statusPage.description = await setting("description");
statusPage.icon = await setting("icon");
statusPage.theme = await setting("statusPageTheme");
statusPage.published = !!(await setting("statusPagePublished"));
statusPage.search_engine_index = !!(await setting("searchEngineIndex"));
statusPage.show_tags = !!(await setting("statusPageTags"));
statusPage.published = !!await setting("statusPagePublished");
statusPage.search_engine_index = !!await setting("searchEngineIndex");
statusPage.show_tags = !!await setting("statusPageTags");
statusPage.password = null;
if (!statusPage.title) {
@ -621,9 +595,13 @@ class Database {
let id = await R.store(statusPage);
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [id]);
await R.exec("UPDATE incident SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [id]);
await R.exec("UPDATE [group] SET status_page_id = ? WHERE status_page_id IS NULL", [
id
]);
await R.exec("DELETE FROM setting WHERE type = 'statusPage'");
@ -636,6 +614,7 @@ class Database {
console.log("Migrating Status Page - Done");
}
}
/**
@ -649,13 +628,13 @@ class Database {
static async patch2Recursion(sqlFilename, databasePatchedFiles) {
let value = this.patchList[sqlFilename];
if (!value) {
if (! value) {
log.info("db", sqlFilename + " skip");
return;
}
// Check if patched
if (!databasePatchedFiles[sqlFilename]) {
if (! databasePatchedFiles[sqlFilename]) {
log.info("db", sqlFilename + " is not patched");
if (value.parents) {
@ -670,6 +649,7 @@ class Database {
await this.importSQLFile("./db/old_migrations/" + sqlFilename);
databasePatchedFiles[sqlFilename] = true;
log.info("db", sqlFilename + " was patched successfully");
} else {
log.debug("db", sqlFilename + " is already patched, skip");
}
@ -689,15 +669,14 @@ class Database {
// Remove all comments (--)
let lines = text.split("\n");
lines = lines.filter((line) => {
return !line.startsWith("--");
return ! line.startsWith("--");
});
// Split statements by semicolon
// Filter out empty line
text = lines.join("\n");
let statements = text
.split(";")
let statements = text.split(";")
.map((statement) => {
return statement.trim();
})
@ -794,10 +773,7 @@ class Database {
// Add a setting for 2.0.0-dev users to skip this migration
if (process.env.SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE === "1") {
log.warn(
"db",
"SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE is set to 1, skipping aggregate table migration forever (for 2.0.0-dev users)"
);
log.warn("db", "SET_MIGRATE_AGGREGATE_TABLE_TO_TRUE is set to 1, skipping aggregate table migration forever (for 2.0.0-dev users)");
await Settings.set("migrateAggregateTableState", "migrated");
}
@ -837,14 +813,11 @@ class Database {
`);
// Stop if stat_* tables are not empty
for (let table of ["stat_minutely", "stat_hourly", "stat_daily"]) {
for (let table of [ "stat_minutely", "stat_hourly", "stat_daily" ]) {
let countResult = await R.getRow(`SELECT COUNT(*) AS count FROM ${table}`);
let count = countResult.count;
if (count > 0) {
log.warn(
"db",
`Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`
);
log.warn("db", `Aggregate table ${table} is not empty, migration will not be started (Maybe you were using 2.0.0-dev?)`);
await migrationServer?.stop();
return;
}
@ -853,35 +826,31 @@ class Database {
await Settings.set("migrateAggregateTableState", "migrating");
let progressPercent = 0;
for (const [i, monitor] of monitors.entries()) {
for (const [ i, monitor ] of monitors.entries()) {
// Get a list of unique dates from the heartbeat table, using raw sql
let dates = await R.getAll(
`
let dates = await R.getAll(`
SELECT DISTINCT DATE(time) AS date
FROM heartbeat
WHERE monitor_id = ?
ORDER BY date ASC
`,
[monitor.monitor_id]
);
`, [
monitor.monitor_id
]);
for (const [dateIndex, date] of dates.entries()) {
for (const [ dateIndex, date ] of dates.entries()) {
// New Uptime Calculator
let calculator = new UptimeCalculator();
calculator.monitorID = monitor.monitor_id;
calculator.setMigrationMode(true);
// Get all the heartbeats for this monitor and date
let heartbeats = await R.getAll(
`
let heartbeats = await R.getAll(`
SELECT status, ping, time
FROM heartbeat
WHERE monitor_id = ?
AND DATE(time) = ?
ORDER BY time ASC
`,
[monitor.monitor_id, date.date]
);
`, [ monitor.monitor_id, date.date ]);
if (heartbeats.length > 0) {
msg = `[DON'T STOP] Migrating monitor ${monitor.monitor_id}s' (${i + 1} of ${monitors.length} total) data - ${date.date} - total migration progress ${progressPercent.toFixed(2)}%`;
@ -894,7 +863,7 @@ class Database {
}
// Calculate progress: (current_monitor_index + relative_date_progress) / total_monitors
progressPercent = ((i + (dateIndex + 1) / dates.length) / monitors.length) * 100;
progressPercent = (i + (dateIndex + 1) / dates.length) / monitors.length * 100;
// Lazy to fix the floating point issue, it is acceptable since it is just a progress bar
if (progressPercent > 100) {
@ -931,8 +900,7 @@ class Database {
if (detailedLog) {
log.info("db", "Deleting non-important heartbeats for monitor " + monitor.id);
}
await R.exec(
`
await R.exec(`
DELETE FROM heartbeat
WHERE monitor_id = ?
AND important = 0
@ -946,11 +914,15 @@ class Database {
LIMIT ?
) AS limited_ids
)
`,
[monitor.id, -24, monitor.id, 100]
);
`, [
monitor.id,
-24,
monitor.id,
100,
]);
}
}
}
module.exports = Database;

View File

@ -7,6 +7,7 @@ const Database = require("./database");
const { axiosAbortSignal, fsExists } = require("./util-server");
class DockerHost {
static CertificateFileNameCA = "ca.pem";
static CertificateFileNameCert = "cert.pem";
static CertificateFileNameKey = "key.pem";
@ -22,11 +23,12 @@ class DockerHost {
let bean;
if (dockerHostID) {
bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [dockerHostID, userID]);
bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [ dockerHostID, userID ]);
if (!bean) {
throw new Error("docker host not found");
}
} else {
bean = R.dispense("docker_host");
}
@ -48,14 +50,14 @@ class DockerHost {
* @returns {Promise<void>}
*/
static async delete(dockerHostID, userID) {
let bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [dockerHostID, userID]);
let bean = await R.findOne("docker_host", " id = ? AND user_id = ? ", [ dockerHostID, userID ]);
if (!bean) {
throw new Error("docker host not found");
}
// Delete removed proxy from monitors if exists
await R.exec("UPDATE monitor SET docker_host = null WHERE docker_host = ?", [dockerHostID]);
await R.exec("UPDATE monitor SET docker_host = null WHERE docker_host = ?", [ dockerHostID ]);
await R.trash(bean);
}
@ -70,7 +72,7 @@ class DockerHost {
url: "/containers/json?all=true",
timeout: 5000,
headers: {
Accept: "*/*",
"Accept": "*/*",
},
signal: axiosAbortSignal(6000),
};
@ -79,24 +81,26 @@ class DockerHost {
options.socketPath = dockerHost.dockerDaemon;
} else if (dockerHost.dockerType === "tcp") {
options.baseURL = DockerHost.patchDockerURL(dockerHost.dockerDaemon);
options.httpsAgent = new https.Agent(
await DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL)
);
options.httpsAgent = new https.Agent(await DockerHost.getHttpsAgentOptions(dockerHost.dockerType, options.baseURL));
}
try {
let res = await axios.request(options);
if (Array.isArray(res.data)) {
if (res.data.length > 1) {
if ("ImageID" in res.data[0]) {
return res.data.length;
} else {
throw new Error("Invalid Docker response, is it Docker really a daemon?");
}
} else {
return res.data.length;
}
} else {
throw new Error("Invalid Docker response, is it Docker really a daemon?");
}
@ -142,35 +146,30 @@ class DockerHost {
static async getHttpsAgentOptions(dockerType, url) {
let baseOptions = {
maxCachedSessions: 0,
rejectUnauthorized: true,
rejectUnauthorized: true
};
let certOptions = {};
let dirName = new URL(url).hostname;
let dirName = (new URL(url)).hostname;
let caPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCA);
let certPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameCert);
let keyPath = path.join(Database.dockerTLSDir, dirName, DockerHost.CertificateFileNameKey);
if (
dockerType === "tcp" &&
(await fsExists(caPath)) &&
(await fsExists(certPath)) &&
(await fsExists(keyPath))
) {
if (dockerType === "tcp" && await fsExists(caPath) && await fsExists(certPath) && await fsExists(keyPath)) {
let ca = await fsAsync.readFile(caPath);
let key = await fsAsync.readFile(keyPath);
let cert = await fsAsync.readFile(certPath);
certOptions = {
ca,
key,
cert,
cert
};
}
return {
...baseOptions,
...certOptions,
...certOptions
};
}
}

Some files were not shown because too many files have changed in this diff Show More