diff --git a/.github/.pre-commit-config.yaml b/.github/.pre-commit-config.yaml
deleted file mode 100644
index 07d586e8d..000000000
--- a/.github/.pre-commit-config.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
- - repo: https://github.com/gitleaks/gitleaks
- rev: v8.16.3
- hooks:
- - id: gitleaks
- - repo: https://github.com/jumanjihouse/pre-commit-hooks
- rev: 3.0.0
- hooks:
- - id: shellcheck
- - repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
- hooks:
- - id: end-of-file-fixer
- - id: trailing-whitespace
\ No newline at end of file
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 000000000..5a9916847
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @dotneft
diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE
index 337bf9058..a57b6ed28 100644
--- a/.github/ISSUE_TEMPLATE
+++ b/.github/ISSUE_TEMPLATE
@@ -34,4 +34,4 @@
```paste below
-```
\ No newline at end of file
+```
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
new file mode 100644
index 000000000..42af9afce
--- /dev/null
+++ b/.github/workflows/dependency-review.yml
@@ -0,0 +1,31 @@
+# Dependency Review Action
+#
+# This Action will scan dependency manifest files that change as part of a Pull Request,
+# surfacing known-vulnerable versions of the packages declared or updated in the PR.
+# Once installed, if the workflow run is marked as required,
+# PRs introducing known-vulnerable packages will be blocked from merging.
+#
+# Source repository: https://github.com/actions/dependency-review-action
+name: 'Dependency Review'
+on: [pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ dependency-review:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Harden Runner
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ disable-sudo: true
+ egress-policy: block
+ allowed-endpoints: >
+ api.github.com:443
+ github.com:443
+
+ - name: 'Checkout Repository'
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - name: 'Dependency Review'
+ uses: actions/dependency-review-action@80f10bf419f34980065523f5efca7ebed17576aa # v4.1.0
diff --git a/.github/workflows/dockerhub_description.yml b/.github/workflows/dockerhub_description.yml
new file mode 100644
index 000000000..80be21c91
--- /dev/null
+++ b/.github/workflows/dockerhub_description.yml
@@ -0,0 +1,70 @@
+name: DockerHub Description
+
+on:
+ push:
+ branches:
+ - 'trunk'
+ paths:
+ - 'Dockerfiles/*/README.md'
+ - '.github/workflows/dockerhub_description.yml'
+ workflow_dispatch:
+
+env:
+ DOCKER_REPOSITORY: "zabbix"
+ IMAGES_PREFIX: "zabbix-"
+ DOCKERFILES_DIRECTORY: "./Dockerfiles"
+
+permissions:
+ contents: read
+
+jobs:
+ main:
+ name: Update description
+ runs-on: ubuntu-latest
+ env:
+ DOCKER_REPOSITORY: "zabbix"
+ permissions:
+ contents: read
+ strategy:
+ fail-fast: false
+ matrix:
+ component:
+ - build-base
+ - build-mysql
+ - build-pgsql
+ - build-sqlite3
+ - agent
+ - agent2
+ - java-gateway
+ - proxy-mysql
+ - proxy-sqlite3
+ - server-mysql
+ - server-pgsql
+ - snmptraps
+ - web-apache-mysql
+ - web-apache-pgsql
+ - web-nginx-mysql
+ - web-nginx-pgsql
+ - web-service
+ steps:
+ - name: Block egress traffic
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ disable-sudo: true
+ egress-policy: block
+ allowed-endpoints: >
+ github.com:443
+ hub.docker.com:443
+
+ - name: Checkout repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 1
+
+ - name: Update DockerHub repo description (zabbix-${{ matrix.component }})
+ uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae # v4.0.0
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+ repository: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.component }}
+ readme-filepath: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.component }}/README.md
diff --git a/.github/workflows/images_build.yml b/.github/workflows/images_build.yml
index b47ec6318..967e3772d 100644
--- a/.github/workflows/images_build.yml
+++ b/.github/workflows/images_build.yml
@@ -66,7 +66,7 @@ jobs:
github.com:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -87,7 +87,7 @@ jobs:
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- os_list=$(jq -r '.["os-linux"] | keys | [ .[] | tostring ] | @json' "$MATRIX_FILE")
+ os_list=$(jq -r '.["os-linux"] | keys | map(select(. != "rhel")) | [ .[] | tostring ] | @json' "$MATRIX_FILE")
echo "::group::Operating System List"
echo "$os_list"
@@ -95,25 +95,12 @@ jobs:
echo "list=$os_list" >> $GITHUB_OUTPUT
- - name: Prepare Platform list
- id: platform_list
- env:
- MATRIX_FILE: ${{ env.MATRIX_FILE }}
- run: |
- platform_list=$(jq -r '.["os-linux"] | tostring | @json' "$MATRIX_FILE")
-
- echo "::group::Platform List"
- echo "$platform_list"
- echo "::endgroup::"
-
- echo "list=$platform_list" >> $GITHUB_OUTPUT
-
- name: Prepare Database engine list
id: database
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- database_list=$(jq -r '[.components | values[] ] | sort | unique | del(.. | select ( . == "" ) ) | [ .[] | tostring ] | @json' "$MATRIX_FILE")
+ database_list=$(jq -r '[.components | values[].base ] | sort | unique | del(.. | select ( . == "" ) ) | @json' "$MATRIX_FILE")
echo "::group::Database List"
echo "$database_list"
@@ -126,7 +113,7 @@ jobs:
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- component_list=$(jq -r '.components | keys | [ .[] | tostring ] | @json' "$MATRIX_FILE")
+ component_list=$(jq -r '.components | keys | @json' "$MATRIX_FILE")
echo "::group::Zabbix Component List"
echo "$component_list"
@@ -184,16 +171,13 @@ jobs:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- archive.ubuntu.com:443
archive.ubuntu.com:80
atl.mirrors.knownhost.com:443
atl.mirrors.knownhost.com:80
auth.docker.io:443
cdn03.quay.io:443
- centos-distro.1gservers.com:80
centos-stream-distro.1gservers.com:443
centos-stream-distro.1gservers.com:80
- centos.mirror.shastacoe.net:80
dfw.mirror.rackspace.com:443
dfw.mirror.rackspace.com:80
dl-cdn.alpinelinux.org:443
@@ -205,8 +189,6 @@ jobs:
ftp-nyc.osuosl.org:80
ftp-osl.osuosl.org:443
ftp-osl.osuosl.org:80
- ftp.agdsn.de:443
- ftp.osuosl.org:80
ftp.plusline.net:443
ftp.plusline.net:80
ftpmirror.your.org:80
@@ -221,17 +203,13 @@ jobs:
mirror-mci.yuki.net.uk:80
mirror.arizona.edu:443
mirror.arizona.edu:80
- mirror.ash.fastserv.com:80
mirror.dogado.de:443
mirror.dogado.de:80
- mirror.ette.biz:80
mirror.facebook.net:443
mirror.facebook.net:80
mirror.fcix.net:443
mirror.hoobly.com:443
- mirror.hoobly.com:80
mirror.math.princeton.edu:443
- mirror.metrocast.net:80
mirror.netzwerge.de:443
mirror.pilotfiber.com:443
mirror.pilotfiber.com:80
@@ -241,14 +219,12 @@ jobs:
mirror.scaleuptech.com:80
mirror.servaxnet.com:443
mirror.servaxnet.com:80
- mirror.sfo12.us.leaseweb.net:80
mirror.siena.edu:80
mirror.stream.centos.org:443
mirror.stream.centos.org:80
mirror.team-cymru.com:443
mirror.team-cymru.com:80
mirror1.hs-esslingen.de:443
- mirrorlist.centos.org:80
mirrors.centos.org:443
mirrors.fedoraproject.org:443
mirrors.fedoraproject.org:80
@@ -259,38 +235,27 @@ jobs:
mirrors.sonic.net:443
mirrors.wcupa.edu:443
mirrors.wcupa.edu:80
- mirrors.xtom.com:80
- mirrors.xtom.de:443
mirrors.xtom.de:80
na.edge.kernel.org:443
nocix.mm.fcix.net:443
oauth2.sigstore.dev:443
objects.githubusercontent.com:443
- ports.ubuntu.com:443
ports.ubuntu.com:80
production.cloudflare.docker.com:443
quay.io:443
registry-1.docker.io:443
rekor.sigstore.dev:443
repo.ialab.dsu.edu:443
- repo1.sea.innoscale.net:80
repos.eggycrew.com:443
repos.eggycrew.com:80
- scientificlinux.physik.uni-muenchen.de:80
- security.ubuntu.com:443
security.ubuntu.com:80
- southfront.mm.fcix.net:80
tuf-repo-cdn.sigstore.dev:443
- tx-mirror.tier.net:80
uvermont.mm.fcix.net:443
- volico.mm.fcix.net:80
- www.gtlib.gatech.edu:80
yum.oracle.com:443
ziply.mm.fcix.net:443
- ziply.mm.fcix.net:80
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -304,18 +269,18 @@ jobs:
run: cosign version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v3
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
driver-opts: image=moby/buildkit:master
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -337,7 +302,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_BUILD_NAME }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -354,7 +319,7 @@ jobs:
- name: Build and publish image
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ env.BASE_BUILD_NAME }}/${{ matrix.os }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ env.BASE_BUILD_NAME }}/${{ matrix.os }}/Dockerfile
@@ -403,7 +368,7 @@ jobs:
echo "$DIGEST" > "$CACHE_FILE_NAME"
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -451,7 +416,7 @@ jobs:
rekor.sigstore.dev:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -465,18 +430,18 @@ jobs:
run: cosign version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v3
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
driver-opts: image=moby/buildkit:master
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -498,7 +463,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.build }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -514,7 +479,7 @@ jobs:
latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
- name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -559,7 +524,7 @@ jobs:
- name: Build ${{ matrix.build }}/${{ matrix.os }} and push
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}/Dockerfile
@@ -608,7 +573,7 @@ jobs:
echo "$DIGEST" > $CACHE_FILE_NAME
- name: Caching SHA256 tag of the image
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ matrix.build }}_${{ matrix.os }}
key: ${{ matrix.build }}-${{ matrix.os }}-${{ github.run_id }}
@@ -635,63 +600,45 @@ jobs:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- archive.ubuntu.com:443
- archive.ubuntu.com:80
+ auth.docker.io:443
+ dl-cdn.alpinelinux.org:443
+ github.com:443
+ index.docker.io:443
+ production.cloudflare.docker.com:443
+ registry-1.docker.io:443
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
+ api.github.com:443
atl.mirrors.knownhost.com:443
atl.mirrors.knownhost.com:80
auth.docker.io:443
cdn03.quay.io:443
- centos-distro.1gservers.com:80
centos-stream-distro.1gservers.com:443
centos-stream-distro.1gservers.com:80
- centos.mirror.shastacoe.net:80
d2lzkl7pfhq30w.cloudfront.net:443
- deb.debian.org:80
- dfw.mirror.rackspace.com:443
- dfw.mirror.rackspace.com:80
- dl-cdn.alpinelinux.org:443
- dl.google.com:443
- download.cf.centos.org:443
- download.cf.centos.org:80
- epel.mirror.constant.com:443
epel.mirror.constant.com:80
forksystems.mm.fcix.net:80
ftp-nyc.osuosl.org:443
ftp-nyc.osuosl.org:80
ftp-osl.osuosl.org:443
ftp-osl.osuosl.org:80
- ftp.agdsn.de:443
- ftp.osuosl.org:80
- ftp.plusline.net:443
ftp.plusline.net:80
ftpmirror.your.org:80
- fulcio.sigstore.dev:443
github.com:443
iad.mirror.rackspace.com:443
- iad.mirror.rackspace.com:80
index.docker.io:443
ix-denver.mm.fcix.net:443
- keyserver.ubuntu.com:11371
- lesnet.mm.fcix.net:443
mirror-mci.yuki.net.uk:443
- mirror-mci.yuki.net.uk:80
mirror.23m.com:80
- mirror.arizona.edu:443
mirror.arizona.edu:80
- mirror.ash.fastserv.com:80
mirror.dal.nexril.net:80
mirror.de.leaseweb.net:80
- mirror.dogado.de:443
mirror.dogado.de:80
- mirror.ette.biz:80
- mirror.facebook.net:443
mirror.facebook.net:80
- mirror.fcix.net:443
- mirror.hoobly.com:443
mirror.hoobly.com:80
- mirror.math.princeton.edu:443
mirror.math.princeton.edu:80
- mirror.metrocast.net:80
mirror.netcologne.de:443
mirror.netzwerge.de:443
mirror.pilotfiber.com:443
@@ -699,77 +646,74 @@ jobs:
mirror.rackspace.com:443
mirror.rackspace.com:80
mirror.scaleuptech.com:443
- mirror.scaleuptech.com:80
mirror.servaxnet.com:443
mirror.servaxnet.com:80
mirror.sfo12.us.leaseweb.net:80
mirror.siena.edu:80
mirror.steadfastnet.com:80
- mirror.stream.centos.org:443
- mirror.stream.centos.org:80
mirror.team-cymru.com:443
mirror.team-cymru.com:80
mirror.umd.edu:443
mirror1.hs-esslingen.de:443
- mirrorlist.centos.org:80
mirrors.centos.org:443
mirrors.fedoraproject.org:443
- mirrors.fedoraproject.org:80
mirrors.iu13.net:443
mirrors.iu13.net:80
- mirrors.mit.edu:443
mirrors.ocf.berkeley.edu:443
- mirrors.ocf.berkeley.edu:80
- mirrors.sonic.net:443
mirrors.sonic.net:80
mirrors.syringanetworks.net:80
mirrors.vcea.wsu.edu:80
- mirrors.wcupa.edu:443
mirrors.wcupa.edu:80
- mirrors.xtom.com:80
- mirrors.xtom.de:443
mirrors.xtom.de:80
na.edge.kernel.org:443
- nginx.org:443
- nginx.org:80
nnenix.mm.fcix.net:80
- nocix.mm.fcix.net:443
- oauth2.sigstore.dev:443
- objects.githubusercontent.com:443
ohioix.mm.fcix.net:80
- ports.ubuntu.com:443
- ports.ubuntu.com:80
production.cloudflare.docker.com:443
pubmirror1.math.uh.edu:443
pubmirror3.math.uh.edu:80
quay.io:443
registry-1.docker.io:443
- rekor.sigstore.dev:443
- repo.ialab.dsu.edu:443
repo.ialab.dsu.edu:80
- repo1.sea.innoscale.net:80
- repos.eggycrew.com:443
repos.eggycrew.com:80
- scientificlinux.physik.uni-muenchen.de:80
- security.ubuntu.com:443
- security.ubuntu.com:80
- southfront.mm.fcix.net:80
- tuf-repo-cdn.sigstore.dev:443
- tx-mirror.tier.net:80
- uvermont.mm.fcix.net:443
uvermont.mm.fcix.net:80
- volico.mm.fcix.net:80
- www.gtlib.gatech.edu:80
- yum.oracle.com:443
ziply.mm.fcix.net:443
- ziply.mm.fcix.net:80
- keyserver.ubuntu.com:80
- p80.pool.sks-keyservers.net:80
- pgp.mit.edu:11371
- ha.pool.sks-keyservers.net:11371
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
+ oauth2.sigstore.dev:443
+ api.github.com:443
+ auth.docker.io:443
+ github.com:443
+ index.docker.io:443
+ production.cloudflare.docker.com:443
+ registry-1.docker.io:443
+ yum.oracle.com:443
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
+ api.github.com:443
+ archive.ubuntu.com:80
+ auth.docker.io:443
+ deb.debian.org:80
+ github.com:443
+ index.docker.io:443
+ keyserver.ubuntu.com:11371
+ nginx.org:443
+ nginx.org:80
+ ports.ubuntu.com:80
+ production.cloudflare.docker.com:443
+ registry-1.docker.io:443
+ security.ubuntu.com:80
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -783,18 +727,18 @@ jobs:
run: cosign version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v3
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
driver-opts: image=moby/buildkit:master
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -835,7 +779,7 @@ jobs:
MATRIX_BUILD: ${{ matrix.build }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\"" "$MATRIX_FILE")
+ BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\".base" "$MATRIX_FILE")
echo "::group::Base Build Image"
echo "$BUILD_BASE"
@@ -845,7 +789,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX}}${{ matrix.build }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -861,7 +805,7 @@ jobs:
latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
- name: Download SHA256 tag of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
if: ${{ matrix.build != 'snmptraps' }}
with:
path: ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.os }}
@@ -925,7 +869,7 @@ jobs:
- name: Build and push image
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}/Dockerfile
diff --git a/.github/workflows/images_build_rhel.yml b/.github/workflows/images_build_rhel.yml
index 89623de4c..fa803125f 100644
--- a/.github/workflows/images_build_rhel.yml
+++ b/.github/workflows/images_build_rhel.yml
@@ -4,7 +4,22 @@ on:
release:
types:
- published
+ push:
+ branches:
+ - '[0-9]+.[0-9]+'
+ - 'trunk'
+ paths:
+ - 'Dockerfiles/*/rhel/*'
+ - 'build.json'
+ - '!**/README.md'
+ - '.github/workflows/images_build_rhel.yml'
workflow_dispatch:
+ inputs:
+ publish_images:
+ description: 'Publish images'
+ required: true
+ default: false
+ type: boolean
defaults:
run:
@@ -14,13 +29,14 @@ permissions:
contents: read
env:
- AUTO_PUSH_IMAGES: ${{ contains(fromJSON('["workflow_dispatch"]'), github.event_name) && 'false' || vars.AUTO_PUSH_IMAGES }}
+ AUTO_PUSH_IMAGES: ${{ contains(fromJSON('["workflow_dispatch", "push"]'), github.event_name) && 'false' || vars.AUTO_PUSH_IMAGES }}
LATEST_BRANCH: ${{ github.event.repository.default_branch }}
TRUNK_GIT_BRANCH: "refs/heads/trunk"
IMAGES_PREFIX: "zabbix-"
BASE_BUILD_NAME: "build-base"
+ MATRIX_FILE: "build.json"
DOCKERFILES_DIRECTORY: "Dockerfiles"
OIDC_ISSUER: "https://token.actions.githubusercontent.com"
@@ -31,6 +47,9 @@ env:
PREFLIGHT_IMAGE: "quay.io/opdev/preflight:stable"
PFLT_LOGLEVEL: "warn"
PFLT_ARTIFACTS: "/tmp/artifacts"
+ IMAGE_DIR: "/tmp/images"
+
+ RHEL_BUILD: "true"
jobs:
init_build:
@@ -38,11 +57,15 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
+ actions: write
outputs:
+ platforms: ${{ steps.platform_list.outputs.list }}
+ database: ${{ steps.database.outputs.list }}
components: ${{ steps.components.outputs.list }}
is_default_branch: ${{ steps.branch_info.outputs.is_default_branch }}
current_branch: ${{ steps.branch_info.outputs.current_branch }}
sha_short: ${{ steps.branch_info.outputs.sha_short }}
+ secret_prefix: ${{ steps.branch_info.outputs.secret_prefix }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
@@ -50,12 +73,64 @@ jobs:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
+ api.github.com:443
github.com:443
+ objects.githubusercontent.com:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
+ sparse-checkout: ${{ env.MATRIX_FILE }}
+
+ - name: Check ${{ env.MATRIX_FILE }} file
+ id: build_exists
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ if [[ ! -f "$MATRIX_FILE" ]]; then
+ echo "::error::File $MATRIX_FILE is missing"
+ exit 1
+ fi
+
+ - name: Prepare Platform list
+ id: platform_list
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ platform_list=$(jq -r '.["os-linux"].rhel | @json' "$MATRIX_FILE")
+
+ echo "::group::Platform List"
+ echo "$platform_list"
+ echo "::endgroup::"
+
+ echo "list=$platform_list" >> $GITHUB_OUTPUT
+
+ - name: Prepare Database engine list
+ id: database
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ database_list=$(jq -r '[.components | map_values(select(.rhel == true)) | values[].base ] | sort | unique | del(.. | select ( . == "" ) ) | @json' "$MATRIX_FILE")
+
+ echo "::group::Database List"
+ echo "$database_list"
+ echo "::endgroup::"
+
+ echo "list=$database_list" >> $GITHUB_OUTPUT
+
+ - name: Prepare Zabbix component list
+ id: components
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ component_list=$(jq -r '.components | map_values(select(.rhel == true)) | keys | @json' "$MATRIX_FILE")
+
+ echo "::group::Zabbix Component List"
+ echo "$component_list"
+ echo "::endgroup::"
+
+ echo "list=$component_list" >> $GITHUB_OUTPUT
- name: Get branch info
id: branch_info
@@ -77,30 +152,39 @@ jobs:
result=true
fi
- echo "::group::Branch data"
+ echo "::group::Branch metadata"
echo "is_default_branch - $result"
echo "current_branch - $github_ref"
+        echo "secret_prefix - RHEL_${github_ref//.}"
echo "sha_short - $sha_short"
echo "::endgroup::"
echo "is_default_branch=$result" >> $GITHUB_OUTPUT
echo "current_branch=$github_ref" >> $GITHUB_OUTPUT
+ echo "secret_prefix=RHEL_${github_ref//.}" >> $GITHUB_OUTPUT
echo "sha_short=$sha_short" >> $GITHUB_OUTPUT
-
- - name: Prepare Zabbix component list
- id: components
+ - name: Cleanup cache
+ shell: bash
env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
- CURRENT_BRANCH: ${{ steps.branch_info.outputs.current_branch }}
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ REPO: ${{ github.repository }}
+ BRANCH: ${{ steps.branch_info.outputs.current_branch }}
+ GH_RUN_ID: ${{ github.run_id }}
run: |
- component_list=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components | keys | @json")
+ gh extension install actions/gh-actions-cache
- echo "::group::Zabbix Component List"
- echo "$component_list"
- echo "::endgroup::"
+ cache_keys=$(gh actions-cache list -R "${REPO}" -B "${BRANCH}" -L 100 --sort created-at --order desc | cut -f 1)
- echo "list=$component_list" >> $GITHUB_OUTPUT
+ ## Setting this to not fail the workflow while deleting cache keys
+ set +e
+ echo "Deleting caches..."
+ for cache_key in $cache_keys
+ do
+ if [[ "$cache_key" == *"${GH_RUN_ID}" ]]; then
+ gh actions-cache delete $cache_key -R "${REPO}" -B "${BRANCH}" --confirm
+ fi
+ done
build_base:
timeout-minutes: 30
@@ -110,13 +194,13 @@ jobs:
fail-fast: false
matrix:
build: [build-base]
- arch: [X64, ARM64]
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
@@ -137,15 +221,15 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
tags: |
type=sha,suffix=-${{ steps.lc.outputs.arch }}
- - name: Build Zabbix Build Base
+ - name: Build image
id: build_image
- uses: redhat-actions/buildah-build@v2
+ uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
layers: false
@@ -154,28 +238,83 @@ jobs:
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
extra-args: |
--pull
+ --iidfile=${{ github.workspace }}/iidfile
- - name: Image digest
+ - name: Prepare image metadata
+ id: image_metadata
env:
- IMAGE_TAG: ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+ IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
- DIGEST=$(podman inspect ${IMAGE_TAG} --format "{{ index .RepoDigests 0}}" | cut -d '@' -f2)
- echo "::group::Image digest"
- echo "$DIGEST"
+ TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
+
+ echo "::group::Image tag"
+ echo "image_tag=$IMAGE_TAG"
+ echo "::endgroup::"
+ echo "::group::Image Tag ID"
+ echo "tag_id=$TAG_ID"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$CACHE_FILE_NAME"
echo "::endgroup::"
- echo "$DIGEST" > "$CACHE_FILE_NAME"
+ echo "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
+ echo "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
- - name: Cache image digest
- uses: actions/cache@v4
+ echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
+ echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT
+
+ - name: Cache image metadata
+ uses: actions/cache/save@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ path: |
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
+ - name: Push image to local storage
+ id: push_image
+ env:
+ IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+ echo "Image ${IMAGE_TAG} location: \"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
+ podman push "${IMAGE_TAG}" dir:"${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
+ - name: Post build image
+ if: ${{ success() || failure() }}
+ env:
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+
+ rm -rf "$GITHUB_WORKSPACE/iidfile"
+
+ echo "Removing working containers"
+ buildah rm -a 2>/dev/null || true
+ echo "Removing container data in storage not controlled by podman"
+ podman system prune --external 2>/dev/null
+ echo "Removing all unused container data with volumes"
+ podman system prune -a --volumes -f 2>/dev/null
+      echo "Resetting podman storage to default state"
+ podman system reset -f 2>/dev/null || true
+
+ echo "::endgroup::"
+
+ - name: Check on failures
+ if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
+ env:
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ run: |
+ echo "::group::Removing orphaned image"
+ rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
build_base_database:
timeout-minutes: 180
needs: [ "build_base", "init_build"]
@@ -183,15 +322,14 @@ jobs:
strategy:
fail-fast: false
matrix:
- build: [build-mysql, build-sqlite3]
- arch: [X64, ARM64]
+ build: ${{ fromJson(needs.init_build.outputs.database) }}
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
- id-token: write
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
@@ -202,41 +340,54 @@ jobs:
run: |
echo "arch=${ARCH,,}" >> $GITHUB_OUTPUT
- - name: Generate tags
- id: meta
- uses: docker/metadata-action@v5
+ - name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
- tags: |
- type=sha,suffix=-${{ steps.lc.outputs.arch }}
-
- - name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
- uses: actions/cache@v4
- with:
- path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ path: |
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
- - name: Retrieve ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} SHA256 tag
+ - name: Pull ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} image
id: base_build
env:
MATRIX_ARCH: ${{ matrix.arch }}
BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
+ BASE_TAG_ID=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag_id")
+ BASE_IMAGE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag")
- echo "::group::Base build image information"
- echo "base_tag=${BASE_TAG}"
- echo "base_build_image=${BUILD_BASE_IMAGE}"
+ echo "::group::Pull image"
+ echo "podman pull dir:\"${IMAGE_DIR}/${BASE_TAG_ID}\""
+ podman pull dir:"${IMAGE_DIR}/${BASE_TAG_ID}"
echo "::endgroup::"
- echo "base_tag=${BASE_TAG}" >> $GITHUB_OUTPUT
- echo "base_build_image=${BUILD_BASE_IMAGE}" >> $GITHUB_OUTPUT
+ echo "::group::Tag image"
+ echo "podman tag \"${BASE_TAG_ID}\" \"${BASE_IMAGE_TAG}\""
+ podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
+ echo "::endgroup::"
- - name: Build Zabbix Build Base
+ echo "::group::SHA256 tag"
+ DIGEST=$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')
+ BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
+
+ echo "base_build_image=${BASE_BUILD_IMAGE}"
+ echo "::endgroup::"
+
+ echo "base_build_image=${BASE_BUILD_IMAGE}" >> $GITHUB_OUTPUT
+
+ - name: Generate tags
+ id: meta
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
+ with:
+ images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
+ tags: |
+ type=sha,suffix=-${{ steps.lc.outputs.arch }}
+
+ - name: Build image
id: build_image
- uses: redhat-actions/buildah-build@v2
+ uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
layers: false
@@ -244,28 +395,84 @@ jobs:
containerfiles: |
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
+ extra-args: |
+ --iidfile=${{ github.workspace }}/iidfile
- - name: Image digest
+ - name: Prepare image metadata
+ id: image_metadata
env:
- IMAGE_TAG: ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+ IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.arch }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
- DIGEST=$(podman inspect ${IMAGE_TAG} --format "{{ index .RepoDigests 0}}" | cut -d '@' -f2)
- echo "::group::Image digest"
- echo "$DIGEST"
+ TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
+
+ echo "::group::Image tag"
+ echo "image_tag=$IMAGE_TAG"
+ echo "::endgroup::"
+ echo "::group::Image Tag ID"
+ echo "tag_id=$TAG_ID"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$CACHE_FILE_NAME"
echo "::endgroup::"
- echo "$DIGEST" > "$CACHE_FILE_NAME"
+ echo "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
+ echo "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
+
+ echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
+ echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache/save@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ matrix.build }}_${{ matrix.arch }}
+ path: |
+ ${{ matrix.build }}_${{ matrix.arch }}_tag_id
+ ${{ matrix.build }}_${{ matrix.arch }}_tag
key: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
+ - name: Push image to local storage
+ id: push_image
+ env:
+ IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+ echo "podman push \"${IMAGE_TAG}\" dir:\"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
+ podman push "${IMAGE_TAG}" dir:"${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
+ - name: Post build image
+ if: ${{ success() || failure() }}
+ env:
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+
+ rm -rf "$GITHUB_WORKSPACE/iidfile"
+
+ echo "Removing working containers"
+ buildah rm -a 2>/dev/null || true
+ echo "Removing container data in storage not controlled by podman"
+ podman system prune --external 2>/dev/null
+ echo "Removing all unused container data with volumes"
+ podman system prune -a --volumes -f 2>/dev/null
+ echo "Resetting podman storage to default state"
+ podman system reset -f 2>/dev/null || true
+
+ echo "::endgroup::"
+
+ - name: Check on failures
+ if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
+ env:
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ run: |
+ echo "::group::Removing orphaned image"
+ rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
build_images:
timeout-minutes: 90
needs: [ "build_base_database", "init_build"]
@@ -274,79 +481,81 @@ jobs:
fail-fast: false
matrix:
build: ${{ fromJson(needs.init_build.outputs.components) }}
- arch: [X64, ARM64]
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
- id-token: write
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
- - name: Fix string case
- id: lc
+ - name: Variables formatting
+ id: var_format
env:
- ARCH: ${{ matrix.arch }}
+ MATRIX_BUILD: ${{ matrix.build }}
run: |
- echo "arch=${ARCH,,}" >> $GITHUB_OUTPUT
+ MATRIX_BUILD=${MATRIX_BUILD^^}
+ MATRIX_BUILD=${MATRIX_BUILD//-/_}
+
+ echo "::group::Result"
+ echo "matrix_build=${MATRIX_BUILD}"
+ echo "::endgroup::"
+ echo "matrix_build=${MATRIX_BUILD}" >> $GITHUB_OUTPUT
- name: Detect Build Base Image
id: build_base_image
+ if: ${{ matrix.build != 'snmptraps' }}
env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
MATRIX_BUILD: ${{ matrix.build }}
- CURRENT_BRANCH: ${{ needs.init_build.outputs.current_branch }}
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- BUILD_BASE=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".build_base")
+ BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\".base" "$MATRIX_FILE")
- echo "::group::Build base image"
- echo "build_base=$BUILD_BASE"
+ echo "::group::Base Build Image"
+ echo "$BUILD_BASE"
echo "::endgroup::"
- echo "build_base=$BUILD_BASE" >> $GITHUB_OUTPUT
+ echo "build_base=${BUILD_BASE}" >> $GITHUB_OUTPUT
- - name: Generate image name
- id: image_name
- env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
- MATRIX_BUILD: ${{ matrix.build }}
- CURRENT_BRANCH: ${{ needs.init_build.outputs.current_branch }}
- run: |
- IMAGE_NAME=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".login")
-
- echo "::add-mask::$IMAGE_NAME"
- echo "image_name=$IMAGE_NAME" >> $GITHUB_OUTPUT
-
- - name: Generate credentials
- id: login_credentials
- env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
- MATRIX_BUILD: ${{ matrix.build }}
- CURRENT_BRANCH: ${{ needs.init_build.outputs.current_branch }}
- run: |
- IMAGE_NAME=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".login")
- REGISTRY_PASSWORD=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".secret")
-
- echo "::add-mask::$IMAGE_NAME"
- echo "::add-mask::redhat-isv-containers+$IMAGE_NAME-robot"
- echo "::add-mask::$REGISTRY_PASSWORD"
-
- echo "username=$IMAGE_NAME" >> $GITHUB_OUTPUT
- echo "password=$REGISTRY_PASSWORD" >> $GITHUB_OUTPUT
-
- - name: Log in to Quay.io
- uses: redhat-actions/podman-login@v1.6
- if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
- env:
- LOGIN: ${{ steps.login_credentials.outputs.username }}
- PASSWORD: ${{ steps.login_credentials.outputs.password }}
+ - name: Download metadata of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }}
+ if: ${{ matrix.build != 'snmptraps' }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- username: redhat-isv-containers+${{ env.LOGIN }}-robot
- password: ${{ env.PASSWORD }}
- registry: ${{ env.REGISTRY }}
- auth_file_path: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
+ path: |
+ ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}_tag_id
+ ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}_tag
+ key: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.arch }}-${{ github.run_id }}
+
+ - name: Pull ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }} image
+ id: base_build
+ if: ${{ matrix.build != 'snmptraps' }}
+ env:
+ MATRIX_ARCH: ${{ matrix.arch }}
+ BASE_IMAGE: ${{ steps.build_base_image.outputs.build_base }}
+ IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
+ run: |
+ BASE_TAG_ID=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag_id")
+ BASE_IMAGE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag")
+
+ echo "::group::Pull image"
+ echo "podman pull dir:\"${IMAGE_DIR}/${BASE_TAG_ID}\""
+ podman pull dir:"${IMAGE_DIR}/${BASE_TAG_ID}"
+ echo "::endgroup::"
+
+ echo "::group::Tag image"
+ echo "podman tag \"${BASE_TAG_ID}\" \"${BASE_IMAGE_TAG}\""
+ podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
+ echo "::endgroup::"
+
+ echo "::group::SHA256 tag"
+ DIGEST=$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')
+ BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
+ echo "digest=${BASE_BUILD_IMAGE}"
+ echo "::endgroup::"
+
+ echo "base_build_image=${BASE_BUILD_IMAGE}" >> $GITHUB_OUTPUT
- name: Remove smartmontools
if: ${{ matrix.build == 'agent2' }}
@@ -357,9 +566,9 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
- images: ${{ env.REGISTRY }}/${{ env.REGISTRY_NAMESPACE }}/${{ steps.image_name.outputs.image_name }}
+ images: ${{ env.REGISTRY }}/${{ env.REGISTRY_NAMESPACE }}/${{ secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] || matrix.build }}
tags: |
type=semver,pattern={{version}}
type=sha
@@ -367,33 +576,9 @@ jobs:
latest=${{ github.event_name == 'release' }}
suffix=${{ matrix.arch == 'ARM64' && '-arm64' || '' }},onlatest=true
- - name: Download SHA256 tag of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }}
- uses: actions/cache@v4
- with:
- path: ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}
- key: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.arch }}-${{ github.run_id }}
-
- - name: Retrieve ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }} SHA256 tag
- id: base_build
- env:
- MATRIX_ARCH: ${{ matrix.arch }}
- BASE_IMAGE: ${{ steps.build_base_image.outputs.build_base }}
- IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
- run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
-
- echo "::group::Base build image information"
- echo "base_tag=${BASE_TAG}"
- echo "base_build_image=${BUILD_BASE_IMAGE}"
- echo "::endgroup::"
-
- echo "base_tag=${BASE_TAG}" >> $GITHUB_OUTPUT
- echo "base_build_image=${BUILD_BASE_IMAGE}" >> $GITHUB_OUTPUT
-
- - name: Build ${{ matrix.build }}
+ - name: Build image
id: build_image
- uses: redhat-actions/buildah-build@v2
+ uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
layers: false
@@ -405,18 +590,27 @@ jobs:
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
- - name: Push to RedHat certification procedure
+ - name: Log in to ${{ env.REGISTRY }}
+ uses: redhat-actions/podman-login@9184318aae1ee5034fbfbacc0388acf12669171f # v1.6
+ if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
+ with:
+ username: ${{ format('redhat-isv-containers+{0}-robot', secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)]) }}
+ password: ${{ secrets[format('{0}_{1}_SECRET', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
+ registry: ${{ env.REGISTRY }}
+ auth_file_path: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
+
+ - name: Push to RedHat certification procedure (1st)
id: push_to_registry
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
- uses: redhat-actions/push-to-registry@v2
+ uses: redhat-actions/push-to-registry@9986a6552bc4571882a4a67e016b17361412b4df # v2.7.1
with:
tags: ${{ steps.meta.outputs.tags }}
- - name: Preflight
+ - name: Preflight certification
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
env:
PFLT_DOCKERCONFIG: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
- PFLT_CERTIFICATION_PROJECT_ID: ${{ steps.login_credentials.outputs.username }}
+ PFLT_CERTIFICATION_PROJECT_ID: ${{ secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
PFLT_PYXIS_API_TOKEN: ${{ secrets.REDHAT_API_TOKEN }}
PFLT_ARTIFACTS: ${{ env.PFLT_ARTIFACTS }}
PFLT_LOGLEVEL: ${{ env.PFLT_LOGLEVEL }}
@@ -424,7 +618,7 @@ jobs:
PREFLIGHT_IMAGE: ${{ env.PREFLIGHT_IMAGE }}
run: |
mkdir -p $PFLT_ARTIFACTS
- echo "::group::Pull preflight image"
+ echo "::group::Pull preflight \"$PREFLIGHT_IMAGE\" image"
podman pull "$PREFLIGHT_IMAGE"
echo "::endgroup::"
@@ -445,70 +639,91 @@ jobs:
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"
- - name: Push to RedHat certification procedure
+ - name: Push to RedHat certification procedure (all tags)
id: push_to_registry_all_tags
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
- uses: redhat-actions/push-to-registry@v2
+ uses: redhat-actions/push-to-registry@9986a6552bc4571882a4a67e016b17361412b4df # v2.7.1
with:
tags: ${{ steps.meta.outputs.tags }}
- - name: Cleanup artifacts
- if: ${{ always() }}
+ - name: Post Preflight certification
+ if: ${{ env.AUTO_PUSH_IMAGES == 'true' && (success() || failure()) }}
env:
PREFLIGHT_IMAGE: ${{ env.PREFLIGHT_IMAGE }}
PFLT_ARTIFACTS: ${{ env.PFLT_ARTIFACTS }}
- TAGS: ${{ steps.meta.outputs.tags }}
run: |
- echo "::group::Post build actions"
- echo "$TAGS" | while IFS= read -r image_name ; do podman rmi -i -f "$image_name"; done
+ echo "::group::Result"
rm -rf "$PFLT_ARTIFACTS"
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"
- clean_artifacts:
+ - name: Post build image
+ if: ${{ success() || failure() }}
+ run: |
+ echo "::group::Result"
+
+ echo "Removing working containers"
+ buildah rm -a 2>/dev/null || true
+ echo "Removing container data in storage not controlled by podman"
+ podman system prune --external 2>/dev/null
+ echo "Removing all unused container data with volumes"
+ podman system prune -a --volumes -f 2>/dev/null
+ echo "Resetting podman storage to default state"
+ podman system reset -f 2>/dev/null || true
+
+ echo "::endgroup::"
+
+ clear_artifacts:
timeout-minutes: 90
needs: [ "build_images", "init_build"]
- name: Build ${{ matrix.build }} image (${{ matrix.arch }})
+ name: Clear ${{ matrix.build }} image cache (${{ matrix.arch }})
strategy:
fail-fast: false
matrix:
- build: [build-mysql, build-sqlite3]
- arch: [X64, ARM64]
+ build: ${{ fromJson(needs.init_build.outputs.database) }}
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
- if: ${{ always() && needs.build_base_database.result == 'success' }}
+ if: ${{ needs.build_base_database.result == 'success' }}
permissions: {}
steps:
- - name: Download SHA256 tag of ${{ matrix.build }}:${{ matrix.arch }}
- uses: actions/cache@v4
+ - name: Download metadata of ${{ matrix.build }}:${{ matrix.arch }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ matrix.build }}_${{ matrix.arch }}
+ path: |
+ ${{ matrix.build }}_${{ matrix.arch }}_tag_id
+ ${{ matrix.build }}_${{ matrix.arch }}_tag
key: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
- - name: Remove ${{ matrix.build }}:${{ matrix.arch }} SHA256 tag
+ - name: Remove ${{ matrix.build }}:${{ matrix.arch }} cache
env:
- MATRIX_ARCH: ${{ matrix.arch }}
- BASE_IMAGE: ${{ matrix.build }}
- IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
+ CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.arch }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
+ echo "::group::Result"
- podman rmi -i -f "$BUILD_BASE_IMAGE"
+ BASE_TAG=$(cat "${CACHE_FILE_NAME}_tag_id")
+ echo "Removing ${IMAGE_DIR}/${BASE_TAG}"
+ rm -rf "${IMAGE_DIR}/${BASE_TAG}"
- - name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
- uses: actions/cache@v4
+ echo "::endgroup::"
+
+ - name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ path: |
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
-
- - name: Remove ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} SHA256 tag
+ - name: Remove ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} cache
env:
- MATRIX_ARCH: ${{ matrix.arch }}
- BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
- IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
+ CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
+ echo "::group::Result"
- podman rmi -i -f "$BUILD_BASE_IMAGE"
+ BASE_TAG=$(cat "${CACHE_FILE_NAME}_tag_id")
+ echo "Removing ${IMAGE_DIR}/${BASE_TAG}"
+ rm -rf "${IMAGE_DIR}/${BASE_TAG}"
+
+ echo "::endgroup::"
diff --git a/.github/workflows/images_build_windows.yml b/.github/workflows/images_build_windows.yml
index 7da254980..154f26431 100644
--- a/.github/workflows/images_build_windows.yml
+++ b/.github/workflows/images_build_windows.yml
@@ -67,7 +67,7 @@ jobs:
github.com:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -155,7 +155,7 @@ jobs:
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -169,7 +169,7 @@ jobs:
run: cosign version
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -190,7 +190,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_IMAGE_NAME }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -324,7 +324,7 @@ jobs:
$Env:DIGEST | Set-Content -Path $Env:CACHE_FILE_NAME
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -344,7 +344,7 @@ jobs:
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -358,7 +358,7 @@ jobs:
run: cosign version
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -379,7 +379,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_BUILD_IMAGE_NAME }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -395,7 +395,7 @@ jobs:
latest=false
- name: Download SHA256 tag of ${{ env.BASE_IMAGE_NAME }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -551,7 +551,7 @@ jobs:
$Env:DIGEST | Set-Content -Path $Env:CACHE_FILE_NAME
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -571,7 +571,7 @@ jobs:
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -585,7 +585,7 @@ jobs:
run: cosign version
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -606,7 +606,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.component }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -622,7 +622,7 @@ jobs:
latest=false
- name: Download SHA256 tag of ${{ env.BASE_BUILD_IMAGE_NAME }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 08faac455..3aa8ef685 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -32,13 +32,18 @@ jobs:
# actions: read
steps:
+ - name: Harden Runner
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ egress-policy: audit
+
- name: "Checkout code"
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
persist-credentials: false
- name: "Run analysis"
- uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2
+ uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with:
results_file: results.sarif
results_format: sarif
@@ -60,7 +65,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
+ uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: SARIF file
path: results.sarif
@@ -68,6 +73,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
- uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
+ uses: github/codeql-action/upload-sarif@e675ced7a7522a761fc9c8eb26682c8b27c42b2b # v3.24.1
with:
sarif_file: results.sarif
diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml
new file mode 100644
index 000000000..2fa6f8be5
--- /dev/null
+++ b/.github/workflows/sonarcloud.yml
@@ -0,0 +1,79 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+# This workflow helps you trigger a SonarCloud analysis of your code and populates
+# GitHub Code Scanning alerts with the vulnerabilities found.
+# Free for open source project.
+
+# 1. Login to SonarCloud.io using your GitHub account
+
+# 2. Import your project on SonarCloud
+# * Add your GitHub organization first, then add your repository as a new project.
+# * Please note that many languages are eligible for automatic analysis,
+# which means that the analysis will start automatically without the need to set up GitHub Actions.
+# * This behavior can be changed in Administration > Analysis Method.
+#
+# 3. Follow the SonarCloud in-product tutorial
+# * a. Copy/paste the Project Key and the Organization Key into the args parameter below
+# (You'll find this information in SonarCloud. Click on "Information" at the bottom left)
+#
+# * b. Generate a new token and add it to your Github repository's secrets using the name SONAR_TOKEN
+# (On SonarCloud, click on your avatar on top-right > My account > Security
+# or go directly to https://sonarcloud.io/account/security/)
+
+# Feel free to take a look at our documentation (https://docs.sonarcloud.io/getting-started/github/)
+# or reach out to our community forum if you need some help (https://community.sonarsource.com/c/help/sc/9)
+
+name: SonarCloud analysis
+
+on:
+ push:
+ branches: [ "6.4" ]
+ pull_request:
+ branches: [ "6.4" ]
+ workflow_dispatch:
+
+permissions:
+ pull-requests: read # allows SonarCloud to decorate PRs with analysis results
+
+jobs:
+ Analysis:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Block egress traffic
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ egress-policy: audit
+
+ - name: Checkout repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ # Disabling shallow clone is recommended for improving relevancy of reporting
+ fetch-depth: 0
+
+ - name: Analyze with SonarCloud
+
+ # You can pin the exact commit or the version.
+ # uses: SonarSource/sonarcloud-github-action@49e6cd3b187936a73b8280d59ffd9da69df63ec9
+ uses: SonarSource/sonarcloud-github-action@49e6cd3b187936a73b8280d59ffd9da69df63ec9 # v2.1.1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} # Generate a token on Sonarcloud.io, add it to the secrets of this repo with the name SONAR_TOKEN (Settings > Secrets > Actions > add new repository secret)
+ with:
+ # Additional arguments for the sonarcloud scanner
+ args:
+ # Unique keys of your project and organization. You can find them in SonarCloud > Information (bottom-left menu)
+ # mandatory
+ -Dsonar.projectKey=zabbix_zabbix-docker
+ -Dsonar.organization=zabbix
+ # Comma-separated paths to directories containing main source files.
+ #-Dsonar.sources= # optional, default is project base directory
+ # When you need the analysis to take place in a directory other than the one from which it was launched
+ #-Dsonar.projectBaseDir= # optional, default is .
+ # Comma-separated paths to directories containing test source files.
+ #-Dsonar.tests= # optional. For more info about Code Coverage, please refer to https://docs.sonarcloud.io/enriching/test-coverage/overview/
+ # Adds more detail to both client and server-side analysis logs, activating DEBUG mode for the scanner, and adding client-side environment variables and system properties to the server-side log of analysis report processing.
+ #-Dsonar.verbose= # optional, default is false
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..74989927d
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,41 @@
+repos:
+- repo: https://github.com/gitleaks/gitleaks
+ rev: v8.16.3
+ hooks:
+ - id: gitleaks
+- repo: https://github.com/jumanjihouse/pre-commit-hooks
+ rev: 3.0.0
+ hooks:
+ - id: shellcheck
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: end-of-file-fixer
+ exclude: |
+ (?x)(
+ ^env_vars/\.MYSQL |
+ ^env_vars/\.POSTGRES
+ )
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: trailing-whitespace
+ exclude: |
+ (?x)(
+ .*\.patch$
+ )
+ - id: check-yaml
+ args: [--allow-multiple-documents]
+ - id: check-symlinks
+# - id: pretty-format-json
+ - id: check-added-large-files
+#- repo: https://github.com/adrienverge/yamllint.git
+# rev: v1.21.0 # or higher tag
+# hooks:
+# - id: yamllint
+# args: [--format, parsable, --strict]
+#- repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt
+# rev: 0.2.3
+# hooks:
+# - id: yamlfmt
+# args: [--mapping, '2', --sequence, '1', --offset, '0', --colons, --width, '400']
diff --git a/Dockerfiles/agent/rhel/Dockerfile b/Dockerfiles/agent/rhel/Dockerfile
index 2a9efdcd2..c1153b946 100644
--- a/Dockerfiles/agent/rhel/Dockerfile
+++ b/Dockerfiles/agent/rhel/Dockerfile
@@ -58,7 +58,7 @@ RUN set -eux && \
shadow-utils \
pcre \
libcurl" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y install \
diff --git a/Dockerfiles/agent/windows/docker-entrypoint.ps1 b/Dockerfiles/agent/windows/docker-entrypoint.ps1
index 63e8a8e3d..409f3b8fe 100644
--- a/Dockerfiles/agent/windows/docker-entrypoint.ps1
+++ b/Dockerfiles/agent/windows/docker-entrypoint.ps1
@@ -40,7 +40,7 @@ function Update-Config-Var {
if (-not(Test-Path -Path $ConfigPath -PathType Leaf)) {
throw "**** Configuration file '$ConfigPath' does not exist"
}
-
+
if ($MaskList.Contains($VarName) -eq $true -And [string]::IsNullOrWhitespace($VarValue) -ne $true) {
Write-Host -NoNewline "** Updating '$ConfigPath' parameter ""$VarName"": '****'. Enable DEBUG_MODE to view value ..."
}
@@ -50,12 +50,12 @@ function Update-Config-Var {
if ([string]::IsNullOrWhitespace($VarValue)) {
if ((Get-Content $ConfigPath | %{$_ -match "^$VarName="}) -contains $true) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Where-Object {$_ -notmatch "^$VarName=" } |
Set-Content $ConfigPath
}
- Write-Host "removed"
+ Write-Host "removed"
return
}
@@ -64,7 +64,7 @@ function Update-Config-Var {
Write-Host "undefined"
return
}
-
+
if ($VarName -match '^TLS.*File$') {
$VarValue="$ZabbixUserHomeDir\enc\$VarValue"
}
@@ -75,7 +75,7 @@ function Update-Config-Var {
Write-Host updated
}
elseif ((Get-Content $ConfigPath | select-string -pattern "^[#;] $VarName=").length -gt 0) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Foreach-Object {
$_
if ($_ -match "^[#;] $VarName=") {
diff --git a/Dockerfiles/agent2/rhel/Dockerfile b/Dockerfiles/agent2/rhel/Dockerfile
index aac0c3a66..3ec869df4 100644
--- a/Dockerfiles/agent2/rhel/Dockerfile
+++ b/Dockerfiles/agent2/rhel/Dockerfile
@@ -60,7 +60,7 @@ RUN set -eux && \
smartmontools \
sudo \
libcurl" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y install \
diff --git a/Dockerfiles/agent2/windows/docker-entrypoint.ps1 b/Dockerfiles/agent2/windows/docker-entrypoint.ps1
index ccefce785..8193fe941 100644
--- a/Dockerfiles/agent2/windows/docker-entrypoint.ps1
+++ b/Dockerfiles/agent2/windows/docker-entrypoint.ps1
@@ -40,7 +40,7 @@ function Update-Config-Var {
if (-not(Test-Path -Path $ConfigPath -PathType Leaf)) {
throw "**** Configuration file '$ConfigPath' does not exist"
}
-
+
if ($MaskList.Contains($VarName) -eq $true -And [string]::IsNullOrWhitespace($VarValue) -ne $true) {
Write-Host -NoNewline "** Updating '$ConfigPath' parameter ""$VarName"": '****'. Enable DEBUG_MODE to view value ..."
}
@@ -50,12 +50,12 @@ function Update-Config-Var {
if ([string]::IsNullOrWhitespace($VarValue)) {
if ((Get-Content $ConfigPath | %{$_ -match "^$VarName="}) -contains $true) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Where-Object {$_ -notmatch "^$VarName=" } |
Set-Content $ConfigPath
}
- Write-Host "removed"
+ Write-Host "removed"
return
}
@@ -64,7 +64,7 @@ function Update-Config-Var {
Write-Host "undefined"
return
}
-
+
if ($VarName -match '^TLS.*File$') {
$VarValue="$ZabbixUserHomeDir\enc\$VarValue"
}
@@ -75,7 +75,7 @@ function Update-Config-Var {
Write-Host updated
}
elseif ((Get-Content $ConfigPath | select-string -pattern "^[#;] $VarName=").length -gt 0) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Foreach-Object {
$_
if ($_ -match "^[#;] $VarName=") {
diff --git a/Dockerfiles/build-mysql/alpine/Dockerfile b/Dockerfiles/build-mysql/alpine/Dockerfile
index 0afa7e539..04c4a7606 100644
--- a/Dockerfiles/build-mysql/alpine/Dockerfile
+++ b/Dockerfiles/build-mysql/alpine/Dockerfile
@@ -82,7 +82,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/centos/Dockerfile b/Dockerfiles/build-mysql/centos/Dockerfile
index 13f041175..85416069d 100644
--- a/Dockerfiles/build-mysql/centos/Dockerfile
+++ b/Dockerfiles/build-mysql/centos/Dockerfile
@@ -76,7 +76,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/ol/Dockerfile b/Dockerfiles/build-mysql/ol/Dockerfile
index 87b9d3cc2..5f7e6de1d 100644
--- a/Dockerfiles/build-mysql/ol/Dockerfile
+++ b/Dockerfiles/build-mysql/ol/Dockerfile
@@ -63,7 +63,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/rhel/Dockerfile b/Dockerfiles/build-mysql/rhel/Dockerfile
index 04e55ea2e..807f5cf92 100644
--- a/Dockerfiles/build-mysql/rhel/Dockerfile
+++ b/Dockerfiles/build-mysql/rhel/Dockerfile
@@ -86,7 +86,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/ubuntu/Dockerfile b/Dockerfiles/build-mysql/ubuntu/Dockerfile
index fcd28dc53..9e6a7bcd7 100644
--- a/Dockerfiles/build-mysql/ubuntu/Dockerfile
+++ b/Dockerfiles/build-mysql/ubuntu/Dockerfile
@@ -82,7 +82,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/alpine/Dockerfile b/Dockerfiles/build-pgsql/alpine/Dockerfile
index 00b631ea0..7b33120f3 100644
--- a/Dockerfiles/build-pgsql/alpine/Dockerfile
+++ b/Dockerfiles/build-pgsql/alpine/Dockerfile
@@ -82,7 +82,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/centos/Dockerfile b/Dockerfiles/build-pgsql/centos/Dockerfile
index 0986f73b5..920fa13c8 100644
--- a/Dockerfiles/build-pgsql/centos/Dockerfile
+++ b/Dockerfiles/build-pgsql/centos/Dockerfile
@@ -76,7 +76,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/ol/Dockerfile b/Dockerfiles/build-pgsql/ol/Dockerfile
index 0f1c3ec80..f11640b8e 100644
--- a/Dockerfiles/build-pgsql/ol/Dockerfile
+++ b/Dockerfiles/build-pgsql/ol/Dockerfile
@@ -63,7 +63,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/ubuntu/Dockerfile b/Dockerfiles/build-pgsql/ubuntu/Dockerfile
index e0ea85683..839e85619 100644
--- a/Dockerfiles/build-pgsql/ubuntu/Dockerfile
+++ b/Dockerfiles/build-pgsql/ubuntu/Dockerfile
@@ -82,7 +82,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/alpine/Dockerfile b/Dockerfiles/build-sqlite3/alpine/Dockerfile
index 01b5c86a0..e2b5becbf 100644
--- a/Dockerfiles/build-sqlite3/alpine/Dockerfile
+++ b/Dockerfiles/build-sqlite3/alpine/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/centos/Dockerfile b/Dockerfiles/build-sqlite3/centos/Dockerfile
index 805976349..3ffb3105b 100644
--- a/Dockerfiles/build-sqlite3/centos/Dockerfile
+++ b/Dockerfiles/build-sqlite3/centos/Dockerfile
@@ -66,7 +66,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/ol/Dockerfile b/Dockerfiles/build-sqlite3/ol/Dockerfile
index e00ebab9b..d17c94365 100644
--- a/Dockerfiles/build-sqlite3/ol/Dockerfile
+++ b/Dockerfiles/build-sqlite3/ol/Dockerfile
@@ -53,7 +53,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/rhel/Dockerfile b/Dockerfiles/build-sqlite3/rhel/Dockerfile
index 64e7a6650..dd8c74f59 100644
--- a/Dockerfiles/build-sqlite3/rhel/Dockerfile
+++ b/Dockerfiles/build-sqlite3/rhel/Dockerfile
@@ -76,7 +76,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/ubuntu/Dockerfile b/Dockerfiles/build-sqlite3/ubuntu/Dockerfile
index 3eb32c354..8d987d65f 100644
--- a/Dockerfiles/build-sqlite3/ubuntu/Dockerfile
+++ b/Dockerfiles/build-sqlite3/ubuntu/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/proxy-mysql/README.md b/Dockerfiles/proxy-mysql/README.md
index f95682e96..a9abfc711 100644
--- a/Dockerfiles/proxy-mysql/README.md
+++ b/Dockerfiles/proxy-mysql/README.md
@@ -113,7 +113,7 @@ This variable is port Zabbix server listening on. By default, value is `10051`.
This variable is IP or DNS name of MySQL server. By default, value is 'mysql-server'
### `DB_SERVER_PORT`
-
+
This variable is port of MySQL server. By default, value is '3306'.
### `MYSQL_USER`, `MYSQL_PASSWORD`, `MYSQL_USER_FILE`, `MYSQL_PASSWORD_FILE`
diff --git a/Dockerfiles/proxy-mysql/rhel/Dockerfile b/Dockerfiles/proxy-mysql/rhel/Dockerfile
index d0af1705f..2add93047 100644
--- a/Dockerfiles/proxy-mysql/rhel/Dockerfile
+++ b/Dockerfiles/proxy-mysql/rhel/Dockerfile
@@ -73,7 +73,7 @@ RUN set -eux && \
pcre \
gzip \
unixODBC" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y module enable mysql && \
diff --git a/Dockerfiles/proxy-sqlite3/rhel/Dockerfile b/Dockerfiles/proxy-sqlite3/rhel/Dockerfile
index cfb51d7c6..2e3f2cf7f 100644
--- a/Dockerfiles/proxy-sqlite3/rhel/Dockerfile
+++ b/Dockerfiles/proxy-sqlite3/rhel/Dockerfile
@@ -70,7 +70,7 @@ RUN set -eux && \
pcre \
sqlite-libs \
unixODBC" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y install \
diff --git a/Dockerfiles/server-mysql/rhel/Dockerfile b/Dockerfiles/server-mysql/rhel/Dockerfile
index d9ed371ae..5627aad01 100644
--- a/Dockerfiles/server-mysql/rhel/Dockerfile
+++ b/Dockerfiles/server-mysql/rhel/Dockerfile
@@ -74,7 +74,7 @@ RUN set -eux && \
pcre \
gzip \
unixODBC" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y module enable mysql && \
diff --git a/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh b/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh
index 107ee4daf..167872409 100755
--- a/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh b/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh
index 107ee4daf..167872409 100755
--- a/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh b/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh
index 107ee4daf..167872409 100755
--- a/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh b/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh
index 410b66f09..a4110dc06 100755
--- a/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile b/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile
index 77d9dcbf6..82eeb1b75 100644
--- a/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile
+++ b/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile
@@ -104,7 +104,7 @@ RUN set -eux && \
rm -rf /var/lib/apt/lists/*
EXPOSE 8080/TCP 8443/TCP
-
+
WORKDIR /usr/share/zabbix
COPY ["docker-entrypoint.sh", "/usr/bin/"]
diff --git a/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/rhel/Dockerfile b/Dockerfiles/web-nginx-mysql/rhel/Dockerfile
index aa13a0045..25a859b7b 100644
--- a/Dockerfiles/web-nginx-mysql/rhel/Dockerfile
+++ b/Dockerfiles/web-nginx-mysql/rhel/Dockerfile
@@ -66,7 +66,7 @@ RUN set -eux && \
php-mbstring \
php-mysqlnd \
php-xml" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y module enable mysql && \
diff --git a/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index 72b99cfc1..0141e1985 100644
--- a/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -17,20 +17,20 @@ $ZBX_SERVER_PORT = getenv('ZBX_SERVER_PORT');
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -41,10 +41,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-// Uncomment to override the default paths to SP private key, SP and IdP X.509 certificates, and to set extra settings.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/README.md b/README.md
index 2985e3caf..1abfd57e3 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,10 @@

-[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build.yml)
-[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build.yml)
+[](https://securityscorecards.dev/viewer/?uri=github.com/zabbix/zabbix-docker)
+
+[](https://sonarcloud.io/summary/new_code?id=zabbix_zabbix-docker)
-[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build_windows.yml)
+[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build.yml)
[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build_windows.yml)
# What is Zabbix?
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..4b594df62
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,32 @@
+# Disclosure policy
+
+In Zabbix we use the term "**responsible disclosure**", which means we have a policy on how we disclose all security issues that come to our attention, but only after the issue has been resolved and all customers with support contracts are given time to upgrade or patch their installations.
+
+We kindly ask that when you are reporting a security issue, you follow the same guidelines and share the details only with the Zabbix Security team.
+
+# Before reporting the issue:
+
+Make sure that the issue you are submitting is not related to server configuration, 3rd party scripts and utilities. In order to avoid any possible issues with server configuration we advise Zabbix users to read [Best practices for secure Zabbix setup](https://www.zabbix.com/documentation/current/manual/installation/requirements/best_practices).
+
+# How to report a security issue?
+
+[Create a new issue](https://support.zabbix.com/secure/CreateIssue.jspa) in the Zabbix Security Reports (ZBXSEC) section of the public bug tracker describing the problem (and a proposed solution if possible) in detail. This way we can ensure that only Zabbix security team and the reporter have access to the case.
+
+The following information will be helpful for Zabbix Security team:
+
+- Date and time when you identified the security defect.
+- Affected Zabbix version range.
+- Type of security issue you are reporting, e.g.: XSS, CSRF, SQLi, RCE.
+- Affected components, e.g.: Image, Frontend, Server, Agent, API.
+- Any details you can provide, e.g. screenshots, screen recordings, http(s) transaction logs, POC exploits (please do not share any evidence via unauthenticated file sharing services and avoid sharing sensitive information, because if the Zabbix Security team decides that the issue does not fit the security defect description, it might be moved to the ZBX project and become visible to all users).
+- Step by step instructions to reproduce the issue as the problem might not be easily identifiable.
+
+# How Zabbix deals with reported security issues:
+
+1. Zabbix Security team reviews the issue and evaluates its potential impact.
+2. If the security issue is found not to be related to security then the issue will be moved to ZBX project.
+3. Zabbix security team works on the issue to provide a solution and keeps all details on the problem until the next version of Zabbix is out.
+4. New images are created and made available for download on [https://www.zabbix.com/container_images](https://www.zabbix.com/container_images), [https://hub.docker.com/u/zabbix](https://hub.docker.com/u/zabbix) and [Red Hat Certified Container Catalog](https://catalog.redhat.com/software/containers/search?vendor_name=Zabbix%20Sia&p=1)
+5. Zabbix requests [CVE identifiers](https://cve.mitre.org/) for the security issue.
+6. Clients with valid support agreements are emailed giving a period of time when it is possible to upgrade before the issue becomes known to the public.
+7. A public announcement for the community is made.
diff --git a/build.json b/build.json
index 137b6b3f0..cbcc2d3e3 100644
--- a/build.json
+++ b/build.json
@@ -6,38 +6,41 @@
"linux/arm64",
"linux/s390x"
],
- "ol": [
- "linux/amd64",
- "linux/arm64"
- ],
- "ubuntu": [
- "linux/amd64",
- "linux/arm/v7",
- "linux/arm64",
- "linux/s390x"
- ],
"centos": [
"linux/amd64",
"linux/arm64",
"linux/ppc64le"
+ ],
+ "ol": [
+ "linux/amd64",
+ "linux/arm64"
+ ],
+ "rhel": [
+ "X64"
+ ],
+ "ubuntu": [
+ "linux/amd64",
+ "linux/arm/v7",
+ "linux/arm64",
+ "linux/s390x"
]
},
"os-windows": {
- "windows-2022": "ltsc2022",
- "windows-2019": "ltsc2019"
+ "windows-2019": "ltsc2019",
+ "windows-2022": "ltsc2022"
},
"components": {
- "agent": "build-mysql",
- "agent2": "build-mysql",
- "java-gateway": "build-mysql",
- "proxy-mysql": "build-mysql",
- "proxy-sqlite3": "build-sqlite3",
- "server-mysql": "build-mysql",
- "server-pgsql": "build-pgsql",
- "snmptraps": "",
- "web-apache-mysql": "build-mysql",
- "web-apache-pgsql": "build-pgsql",
- "web-nginx-mysql": "build-mysql",
- "web-nginx-pgsql": "build-mysql"
+ "agent": { "base": "build-mysql", "rhel": true },
+ "agent2": { "base": "build-mysql", "rhel": true },
+ "java-gateway": { "base": "build-mysql", "rhel": true },
+ "proxy-mysql": { "base": "build-mysql", "rhel": true },
+ "proxy-sqlite3": { "base": "build-sqlite3", "rhel": true },
+ "server-mysql": { "base": "build-mysql", "rhel": true },
+ "server-pgsql": { "base": "build-pgsql", "rhel": false },
+ "snmptraps": { "base": "", "rhel": true },
+ "web-apache-mysql": { "base": "build-mysql", "rhel": false },
+ "web-apache-pgsql": { "base": "build-pgsql", "rhel": false },
+ "web-nginx-mysql": { "base": "build-mysql", "rhel": true },
+ "web-nginx-pgsql": { "base": "build-mysql", "rhel": false }
}
-}
\ No newline at end of file
+}
diff --git a/env_vars/mysql_init/init_proxy_db.sql b/env_vars/mysql_init/init_proxy_db.sql
index ca8c6e4c8..0f01f932a 100644
--- a/env_vars/mysql_init/init_proxy_db.sql
+++ b/env_vars/mysql_init/init_proxy_db.sql
@@ -1,2 +1,2 @@
CREATE DATABASE IF NOT EXISTS `zabbix_proxy`;
-GRANT ALL ON `zabbix_proxy`.* TO 'zabbix'@'%';
\ No newline at end of file
+GRANT ALL ON `zabbix_proxy`.* TO 'zabbix'@'%';
diff --git a/kubernetes.yaml b/kubernetes.yaml
index c8ca99b16..59a80ce83 100644
--- a/kubernetes.yaml
+++ b/kubernetes.yaml
@@ -586,12 +586,6 @@ spec:
periodSeconds: 5
timeoutSeconds: 3
failureThreshold: 40
- livenessProbe:
- tcpSocket:
- port: 10051
- timeoutSeconds: 3
- failureThreshold: 3
- periodSeconds: 10
securityContext:
capabilities: {}
privileged: false