diff --git a/.github/.pre-commit-config.yaml b/.github/.pre-commit-config.yaml
deleted file mode 100644
index 07d586e8d..000000000
--- a/.github/.pre-commit-config.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
- - repo: https://github.com/gitleaks/gitleaks
- rev: v8.16.3
- hooks:
- - id: gitleaks
- - repo: https://github.com/jumanjihouse/pre-commit-hooks
- rev: 3.0.0
- hooks:
- - id: shellcheck
- - repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
- hooks:
- - id: end-of-file-fixer
- - id: trailing-whitespace
\ No newline at end of file
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 000000000..5a9916847
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+* @dotneft
diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE
index 337bf9058..a57b6ed28 100644
--- a/.github/ISSUE_TEMPLATE
+++ b/.github/ISSUE_TEMPLATE
@@ -34,4 +34,4 @@
```paste below
-```
\ No newline at end of file
+```
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
new file mode 100644
index 000000000..42af9afce
--- /dev/null
+++ b/.github/workflows/dependency-review.yml
@@ -0,0 +1,31 @@
+# Dependency Review Action
+#
+# This Action will scan dependency manifest files that change as part of a Pull Request,
+# surfacing known-vulnerable versions of the packages declared or updated in the PR.
+# Once installed, if the workflow run is marked as required,
+# PRs introducing known-vulnerable packages will be blocked from merging.
+#
+# Source repository: https://github.com/actions/dependency-review-action
+name: 'Dependency Review'
+on: [pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ dependency-review:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Harden Runner
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ disable-sudo: true
+ egress-policy: block
+ allowed-endpoints: >
+ api.github.com:443
+ github.com:443
+
+ - name: 'Checkout Repository'
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - name: 'Dependency Review'
+ uses: actions/dependency-review-action@80f10bf419f34980065523f5efca7ebed17576aa # v4.1.0
diff --git a/.github/workflows/dockerhub_description.yml b/.github/workflows/dockerhub_description.yml
new file mode 100644
index 000000000..80be21c91
--- /dev/null
+++ b/.github/workflows/dockerhub_description.yml
@@ -0,0 +1,70 @@
+name: DockerHub Description
+
+on:
+ push:
+ branches:
+ - 'trunk'
+ paths:
+ - 'Dockerfiles/*/README.md'
+ - '.github/workflows/dockerhub_description.yml'
+ workflow_dispatch:
+
+env:
+ DOCKER_REPOSITORY: "zabbix"
+ IMAGES_PREFIX: "zabbix-"
+ DOCKERFILES_DIRECTORY: "./Dockerfiles"
+
+permissions:
+ contents: read
+
+jobs:
+ main:
+ name: Update description
+ runs-on: ubuntu-latest
+ env:
+ DOCKER_REPOSITORY: "zabbix"
+ permissions:
+ contents: read
+ strategy:
+ fail-fast: false
+ matrix:
+ component:
+ - build-base
+ - build-mysql
+ - build-pgsql
+ - build-sqlite3
+ - agent
+ - agent2
+ - java-gateway
+ - proxy-mysql
+ - proxy-sqlite3
+ - server-mysql
+ - server-pgsql
+ - snmptraps
+ - web-apache-mysql
+ - web-apache-pgsql
+ - web-nginx-mysql
+ - web-nginx-pgsql
+ - web-service
+ steps:
+ - name: Block egress traffic
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ disable-sudo: true
+ egress-policy: block
+ allowed-endpoints: >
+ github.com:443
+ hub.docker.com:443
+
+ - name: Checkout repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ fetch-depth: 1
+
+ - name: Update DockerHub repo description (zabbix-${{ matrix.component }})
+ uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae # v4.0.0
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+ repository: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.component }}
+ readme-filepath: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.component }}/README.md
diff --git a/.github/workflows/images_build.yml b/.github/workflows/images_build.yml
index 0b4fc9b4b..7b997705e 100644
--- a/.github/workflows/images_build.yml
+++ b/.github/workflows/images_build.yml
@@ -66,7 +66,7 @@ jobs:
github.com:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -87,7 +87,7 @@ jobs:
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- os_list=$(jq -r '.["os-linux"] | keys | [ .[] | tostring ] | @json' "$MATRIX_FILE")
+ os_list=$(jq -r '.["os-linux"] | keys | map(select(. != "rhel")) | [ .[] | tostring ] | @json' "$MATRIX_FILE")
echo "::group::Operating System List"
echo "$os_list"
@@ -95,25 +95,12 @@ jobs:
echo "list=$os_list" >> $GITHUB_OUTPUT
- - name: Prepare Platform list
- id: platform_list
- env:
- MATRIX_FILE: ${{ env.MATRIX_FILE }}
- run: |
- platform_list=$(jq -r '.["os-linux"] | tostring | @json' "$MATRIX_FILE")
-
- echo "::group::Platform List"
- echo "$platform_list"
- echo "::endgroup::"
-
- echo "list=$platform_list" >> $GITHUB_OUTPUT
-
- name: Prepare Database engine list
id: database
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- database_list=$(jq -r '[.components | values[] ] | sort | unique | del(.. | select ( . == "" ) ) | [ .[] | tostring ] | @json' "$MATRIX_FILE")
+ database_list=$(jq -r '[.components | values[].base ] | sort | unique | del(.. | select ( . == "" ) ) | @json' "$MATRIX_FILE")
echo "::group::Database List"
echo "$database_list"
@@ -126,7 +113,7 @@ jobs:
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- component_list=$(jq -r '.components | keys | [ .[] | tostring ] | @json' "$MATRIX_FILE")
+ component_list=$(jq -r '.components | keys | @json' "$MATRIX_FILE")
echo "::group::Zabbix Component List"
echo "$component_list"
@@ -184,16 +171,13 @@ jobs:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- archive.ubuntu.com:443
archive.ubuntu.com:80
atl.mirrors.knownhost.com:443
atl.mirrors.knownhost.com:80
auth.docker.io:443
cdn03.quay.io:443
- centos-distro.1gservers.com:80
centos-stream-distro.1gservers.com:443
centos-stream-distro.1gservers.com:80
- centos.mirror.shastacoe.net:80
dfw.mirror.rackspace.com:443
dfw.mirror.rackspace.com:80
dl-cdn.alpinelinux.org:443
@@ -204,8 +188,6 @@ jobs:
ftp-nyc.osuosl.org:80
ftp-osl.osuosl.org:443
ftp-osl.osuosl.org:80
- ftp.agdsn.de:443
- ftp.osuosl.org:80
ftp.plusline.net:443
ftp.plusline.net:80
ftpmirror.your.org:80
@@ -219,17 +201,13 @@ jobs:
mirror-mci.yuki.net.uk:80
mirror.arizona.edu:443
mirror.arizona.edu:80
- mirror.ash.fastserv.com:80
mirror.dogado.de:443
mirror.dogado.de:80
- mirror.ette.biz:80
mirror.facebook.net:443
mirror.facebook.net:80
mirror.fcix.net:443
mirror.hoobly.com:443
- mirror.hoobly.com:80
mirror.math.princeton.edu:443
- mirror.metrocast.net:80
mirror.netzwerge.de:443
mirror.pilotfiber.com:443
mirror.pilotfiber.com:80
@@ -239,14 +217,12 @@ jobs:
mirror.scaleuptech.com:80
mirror.servaxnet.com:443
mirror.servaxnet.com:80
- mirror.sfo12.us.leaseweb.net:80
mirror.siena.edu:80
mirror.stream.centos.org:443
mirror.stream.centos.org:80
mirror.team-cymru.com:443
mirror.team-cymru.com:80
mirror1.hs-esslingen.de:443
- mirrorlist.centos.org:80
mirrors.centos.org:443
mirrors.fedoraproject.org:443
mirrors.fedoraproject.org:80
@@ -257,38 +233,27 @@ jobs:
mirrors.sonic.net:443
mirrors.wcupa.edu:443
mirrors.wcupa.edu:80
- mirrors.xtom.com:80
- mirrors.xtom.de:443
mirrors.xtom.de:80
na.edge.kernel.org:443
nocix.mm.fcix.net:443
oauth2.sigstore.dev:443
objects.githubusercontent.com:443
- ports.ubuntu.com:443
ports.ubuntu.com:80
production.cloudflare.docker.com:443
quay.io:443
registry-1.docker.io:443
rekor.sigstore.dev:443
repo.ialab.dsu.edu:443
- repo1.sea.innoscale.net:80
repos.eggycrew.com:443
repos.eggycrew.com:80
- scientificlinux.physik.uni-muenchen.de:80
- security.ubuntu.com:443
security.ubuntu.com:80
- southfront.mm.fcix.net:80
tuf-repo-cdn.sigstore.dev:443
- tx-mirror.tier.net:80
uvermont.mm.fcix.net:443
- volico.mm.fcix.net:80
- www.gtlib.gatech.edu:80
yum.oracle.com:443
ziply.mm.fcix.net:443
- ziply.mm.fcix.net:80
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -302,18 +267,18 @@ jobs:
run: cosign version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v3
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
driver-opts: image=moby/buildkit:master
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -335,7 +300,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_BUILD_NAME }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -352,7 +317,7 @@ jobs:
- name: Build and publish image
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ env.BASE_BUILD_NAME }}/${{ matrix.os }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ env.BASE_BUILD_NAME }}/${{ matrix.os }}/Dockerfile
@@ -401,7 +366,7 @@ jobs:
echo "$DIGEST" > "$CACHE_FILE_NAME"
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -449,7 +414,7 @@ jobs:
rekor.sigstore.dev:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -463,18 +428,18 @@ jobs:
run: cosign version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v3
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
driver-opts: image=moby/buildkit:master
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -496,7 +461,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.build }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -512,7 +477,7 @@ jobs:
latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
- name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.os }}
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -557,7 +522,7 @@ jobs:
- name: Build ${{ matrix.build }}/${{ matrix.os }} and push
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}/Dockerfile
@@ -606,7 +571,7 @@ jobs:
echo "$DIGEST" > $CACHE_FILE_NAME
- name: Caching SHA256 tag of the image
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ matrix.build }}_${{ matrix.os }}
key: ${{ matrix.build }}-${{ matrix.os }}-${{ github.run_id }}
@@ -633,62 +598,45 @@ jobs:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- archive.ubuntu.com:443
- archive.ubuntu.com:80
+ auth.docker.io:443
+ dl-cdn.alpinelinux.org:443
+ github.com:443
+ index.docker.io:443
+ production.cloudflare.docker.com:443
+ registry-1.docker.io:443
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
+ api.github.com:443
atl.mirrors.knownhost.com:443
atl.mirrors.knownhost.com:80
auth.docker.io:443
cdn03.quay.io:443
- centos-distro.1gservers.com:80
centos-stream-distro.1gservers.com:443
centos-stream-distro.1gservers.com:80
- centos.mirror.shastacoe.net:80
d2lzkl7pfhq30w.cloudfront.net:443
- deb.debian.org:80
- dfw.mirror.rackspace.com:443
- dfw.mirror.rackspace.com:80
- dl-cdn.alpinelinux.org:443
- download.cf.centos.org:443
- download.cf.centos.org:80
- epel.mirror.constant.com:443
epel.mirror.constant.com:80
forksystems.mm.fcix.net:80
ftp-nyc.osuosl.org:443
ftp-nyc.osuosl.org:80
ftp-osl.osuosl.org:443
ftp-osl.osuosl.org:80
- ftp.agdsn.de:443
- ftp.osuosl.org:80
- ftp.plusline.net:443
ftp.plusline.net:80
ftpmirror.your.org:80
- fulcio.sigstore.dev:443
github.com:443
iad.mirror.rackspace.com:443
- iad.mirror.rackspace.com:80
index.docker.io:443
ix-denver.mm.fcix.net:443
- keyserver.ubuntu.com:11371
- lesnet.mm.fcix.net:443
mirror-mci.yuki.net.uk:443
- mirror-mci.yuki.net.uk:80
mirror.23m.com:80
- mirror.arizona.edu:443
mirror.arizona.edu:80
- mirror.ash.fastserv.com:80
mirror.dal.nexril.net:80
mirror.de.leaseweb.net:80
- mirror.dogado.de:443
mirror.dogado.de:80
- mirror.ette.biz:80
- mirror.facebook.net:443
mirror.facebook.net:80
- mirror.fcix.net:443
- mirror.hoobly.com:443
mirror.hoobly.com:80
- mirror.math.princeton.edu:443
mirror.math.princeton.edu:80
- mirror.metrocast.net:80
mirror.netcologne.de:443
mirror.netzwerge.de:443
mirror.pilotfiber.com:443
@@ -696,73 +644,73 @@ jobs:
mirror.rackspace.com:443
mirror.rackspace.com:80
mirror.scaleuptech.com:443
- mirror.scaleuptech.com:80
mirror.servaxnet.com:443
mirror.servaxnet.com:80
mirror.sfo12.us.leaseweb.net:80
mirror.siena.edu:80
mirror.steadfastnet.com:80
- mirror.stream.centos.org:443
- mirror.stream.centos.org:80
mirror.team-cymru.com:443
mirror.team-cymru.com:80
mirror.umd.edu:443
mirror1.hs-esslingen.de:443
- mirrorlist.centos.org:80
mirrors.centos.org:443
mirrors.fedoraproject.org:443
- mirrors.fedoraproject.org:80
mirrors.iu13.net:443
mirrors.iu13.net:80
- mirrors.mit.edu:443
mirrors.ocf.berkeley.edu:443
- mirrors.ocf.berkeley.edu:80
- mirrors.sonic.net:443
mirrors.sonic.net:80
mirrors.syringanetworks.net:80
mirrors.vcea.wsu.edu:80
- mirrors.wcupa.edu:443
mirrors.wcupa.edu:80
- mirrors.xtom.com:80
- mirrors.xtom.de:443
mirrors.xtom.de:80
na.edge.kernel.org:443
- nginx.org:443
- nginx.org:80
nnenix.mm.fcix.net:80
- nocix.mm.fcix.net:443
- oauth2.sigstore.dev:443
- objects.githubusercontent.com:443
ohioix.mm.fcix.net:80
- ports.ubuntu.com:443
- ports.ubuntu.com:80
production.cloudflare.docker.com:443
pubmirror1.math.uh.edu:443
pubmirror3.math.uh.edu:80
quay.io:443
registry-1.docker.io:443
- rekor.sigstore.dev:443
- repo.ialab.dsu.edu:443
repo.ialab.dsu.edu:80
- repo1.sea.innoscale.net:80
- repos.eggycrew.com:443
repos.eggycrew.com:80
- scientificlinux.physik.uni-muenchen.de:80
- security.ubuntu.com:443
- security.ubuntu.com:80
- southfront.mm.fcix.net:80
- tuf-repo-cdn.sigstore.dev:443
- tx-mirror.tier.net:80
- uvermont.mm.fcix.net:443
uvermont.mm.fcix.net:80
- volico.mm.fcix.net:80
- www.gtlib.gatech.edu:80
- yum.oracle.com:443
ziply.mm.fcix.net:443
- ziply.mm.fcix.net:80
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
+ oauth2.sigstore.dev:443
+ api.github.com:443
+ auth.docker.io:443
+ github.com:443
+ index.docker.io:443
+ production.cloudflare.docker.com:443
+ registry-1.docker.io:443
+ yum.oracle.com:443
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
+ api.github.com:443
+ archive.ubuntu.com:80
+ auth.docker.io:443
+ deb.debian.org:80
+ github.com:443
+ index.docker.io:443
+ keyserver.ubuntu.com:11371
+ nginx.org:443
+ nginx.org:80
+ ports.ubuntu.com:80
+ production.cloudflare.docker.com:443
+ registry-1.docker.io:443
+ security.ubuntu.com:80
+ fulcio.sigstore.dev:443
+ objects.githubusercontent.com:443
+ tuf-repo-cdn.sigstore.dev:443
+ rekor.sigstore.dev:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -776,18 +724,18 @@ jobs:
run: cosign version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v3
+ uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
with:
image: tonistiigi/binfmt:latest
platforms: all
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
+ uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
with:
driver-opts: image=moby/buildkit:master
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -828,7 +776,7 @@ jobs:
MATRIX_BUILD: ${{ matrix.build }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\"" "$MATRIX_FILE")
+ BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\".base" "$MATRIX_FILE")
echo "::group::Base Build Image"
echo "$BUILD_BASE"
@@ -838,7 +786,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX}}${{ matrix.build }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -854,7 +802,7 @@ jobs:
latest=${{ (needs.init_build.outputs.current_branch != 'trunk') && (matrix.os == 'alpine') && ( needs.init_build.outputs.is_default_branch == 'true' ) }}
- name: Download SHA256 tag of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
if: ${{ matrix.build != 'snmptraps' }}
with:
path: ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.os }}
@@ -918,7 +866,7 @@ jobs:
- name: Build and push image
id: docker_build
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 # v5.1.0
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}
file: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/${{ matrix.os }}/Dockerfile
diff --git a/.github/workflows/images_build_rhel.yml b/.github/workflows/images_build_rhel.yml
index 89623de4c..fa803125f 100644
--- a/.github/workflows/images_build_rhel.yml
+++ b/.github/workflows/images_build_rhel.yml
@@ -4,7 +4,22 @@ on:
release:
types:
- published
+ push:
+ branches:
+ - '[0-9]+.[0-9]+'
+ - 'trunk'
+ paths:
+ - 'Dockerfiles/*/rhel/*'
+ - 'build.json'
+ - '!**/README.md'
+ - '.github/workflows/images_build_rhel.yml'
workflow_dispatch:
+ inputs:
+ publish_images:
+ description: 'Publish images'
+ required: true
+ default: false
+ type: boolean
defaults:
run:
@@ -14,13 +29,14 @@ permissions:
contents: read
env:
- AUTO_PUSH_IMAGES: ${{ contains(fromJSON('["workflow_dispatch"]'), github.event_name) && 'false' || vars.AUTO_PUSH_IMAGES }}
+ AUTO_PUSH_IMAGES: ${{ contains(fromJSON('["workflow_dispatch", "push"]'), github.event_name) && 'false' || vars.AUTO_PUSH_IMAGES }}
LATEST_BRANCH: ${{ github.event.repository.default_branch }}
TRUNK_GIT_BRANCH: "refs/heads/trunk"
IMAGES_PREFIX: "zabbix-"
BASE_BUILD_NAME: "build-base"
+ MATRIX_FILE: "build.json"
DOCKERFILES_DIRECTORY: "Dockerfiles"
OIDC_ISSUER: "https://token.actions.githubusercontent.com"
@@ -31,6 +47,9 @@ env:
PREFLIGHT_IMAGE: "quay.io/opdev/preflight:stable"
PFLT_LOGLEVEL: "warn"
PFLT_ARTIFACTS: "/tmp/artifacts"
+ IMAGE_DIR: "/tmp/images"
+
+ RHEL_BUILD: "true"
jobs:
init_build:
@@ -38,11 +57,15 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
+ actions: write
outputs:
+ platforms: ${{ steps.platform_list.outputs.list }}
+ database: ${{ steps.database.outputs.list }}
components: ${{ steps.components.outputs.list }}
is_default_branch: ${{ steps.branch_info.outputs.is_default_branch }}
current_branch: ${{ steps.branch_info.outputs.current_branch }}
sha_short: ${{ steps.branch_info.outputs.sha_short }}
+ secret_prefix: ${{ steps.branch_info.outputs.secret_prefix }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
@@ -50,12 +73,64 @@ jobs:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
+ api.github.com:443
github.com:443
+ objects.githubusercontent.com:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
+ sparse-checkout: ${{ env.MATRIX_FILE }}
+
+ - name: Check ${{ env.MATRIX_FILE }} file
+ id: build_exists
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ if [[ ! -f "$MATRIX_FILE" ]]; then
+ echo "::error::File $MATRIX_FILE is missing"
+ exit 1
+ fi
+
+ - name: Prepare Platform list
+ id: platform_list
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ platform_list=$(jq -r '.["os-linux"].rhel | @json' "$MATRIX_FILE")
+
+ echo "::group::Platform List"
+ echo "$platform_list"
+ echo "::endgroup::"
+
+ echo "list=$platform_list" >> $GITHUB_OUTPUT
+
+ - name: Prepare Database engine list
+ id: database
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ database_list=$(jq -r '[.components | map_values(select(.rhel == true)) | values[].base ] | sort | unique | del(.. | select ( . == "" ) ) | @json' "$MATRIX_FILE")
+
+ echo "::group::Database List"
+ echo "$database_list"
+ echo "::endgroup::"
+
+ echo "list=$database_list" >> $GITHUB_OUTPUT
+
+ - name: Prepare Zabbix component list
+ id: components
+ env:
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
+ run: |
+ component_list=$(jq -r '.components | map_values(select(.rhel == true)) | keys | @json' "$MATRIX_FILE")
+
+ echo "::group::Zabbix Component List"
+ echo "$component_list"
+ echo "::endgroup::"
+
+ echo "list=$component_list" >> $GITHUB_OUTPUT
- name: Get branch info
id: branch_info
@@ -77,30 +152,39 @@ jobs:
result=true
fi
- echo "::group::Branch data"
+ echo "::group::Branch metadata"
echo "is_default_branch - $result"
echo "current_branch - $github_ref"
+ echo "secret_prefix - RHEL_${github_ref//.}"
echo "sha_short - $sha_short"
echo "::endgroup::"
echo "is_default_branch=$result" >> $GITHUB_OUTPUT
echo "current_branch=$github_ref" >> $GITHUB_OUTPUT
+ echo "secret_prefix=RHEL_${github_ref//.}" >> $GITHUB_OUTPUT
echo "sha_short=$sha_short" >> $GITHUB_OUTPUT
-
- - name: Prepare Zabbix component list
- id: components
+ - name: Cleanup cache
+ shell: bash
env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
- CURRENT_BRANCH: ${{ steps.branch_info.outputs.current_branch }}
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ REPO: ${{ github.repository }}
+ BRANCH: ${{ steps.branch_info.outputs.current_branch }}
+ GH_RUN_ID: ${{ github.run_id }}
run: |
- component_list=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components | keys | @json")
+ gh extension install actions/gh-actions-cache
- echo "::group::Zabbix Component List"
- echo "$component_list"
- echo "::endgroup::"
+ cache_keys=$(gh actions-cache list -R "${REPO}" -B "${BRANCH}" -L 100 --sort created-at --order desc | cut -f 1)
- echo "list=$component_list" >> $GITHUB_OUTPUT
+ ## Setting this to not fail the workflow while deleting cache keys
+ set +e
+ echo "Deleting caches..."
+ for cache_key in $cache_keys
+ do
+ if [[ "$cache_key" == *"${GH_RUN_ID}" ]]; then
+ gh actions-cache delete $cache_key -R "${REPO}" -B "${BRANCH}" --confirm
+ fi
+ done
build_base:
timeout-minutes: 30
@@ -110,13 +194,13 @@ jobs:
fail-fast: false
matrix:
build: [build-base]
- arch: [X64, ARM64]
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
@@ -137,15 +221,15 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
tags: |
type=sha,suffix=-${{ steps.lc.outputs.arch }}
- - name: Build Zabbix Build Base
+ - name: Build image
id: build_image
- uses: redhat-actions/buildah-build@v2
+ uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
layers: false
@@ -154,28 +238,83 @@ jobs:
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
extra-args: |
--pull
+ --iidfile=${{ github.workspace }}/iidfile
- - name: Image digest
+ - name: Prepare image metadata
+ id: image_metadata
env:
- IMAGE_TAG: ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+ IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
- DIGEST=$(podman inspect ${IMAGE_TAG} --format "{{ index .RepoDigests 0}}" | cut -d '@' -f2)
- echo "::group::Image digest"
- echo "$DIGEST"
+ TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
+
+ echo "::group::Image tag"
+ echo "image_tag=$IMAGE_TAG"
+ echo "::endgroup::"
+ echo "::group::Image Tag ID"
+ echo "tag_id=$TAG_ID"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$CACHE_FILE_NAME"
echo "::endgroup::"
- echo "$DIGEST" > "$CACHE_FILE_NAME"
+ echo "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
+ echo "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
- - name: Cache image digest
- uses: actions/cache@v4
+ echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
+ echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT
+
+ - name: Cache image metadata
+ uses: actions/cache/save@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ path: |
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
+ - name: Push image to local storage
+ id: push_image
+ env:
+ IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+ echo "Image ${IMAGE_TAG} location: \"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
+ podman push "${IMAGE_TAG}" dir:"${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
+ - name: Post build image
+ if: ${{ success() || failure() }}
+ env:
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+
+ rm -rf "$GITHUB_WORKSPACE/iidfile"
+
+ echo "Removing working containers"
+ buildah rm -a 2>/dev/null || true
+ echo "Removing container data in storage not controlled by podman"
+ podman system prune --external 2>/dev/null
+ echo "Removing all unused container data with volumes"
+ podman system prune -a --volumes -f 2>/dev/null
+ echo "Resetting podman storage to default state"
+ podman system reset -f 2>/dev/null || true
+
+ echo "::endgroup::"
+
+ - name: Check on failures
+ if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
+ env:
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ run: |
+ echo "::group::Removing orphaned image"
+ rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
build_base_database:
timeout-minutes: 180
needs: [ "build_base", "init_build"]
@@ -183,15 +322,14 @@ jobs:
strategy:
fail-fast: false
matrix:
- build: [build-mysql, build-sqlite3]
- arch: [X64, ARM64]
+ build: ${{ fromJson(needs.init_build.outputs.database) }}
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
- id-token: write
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
@@ -202,41 +340,54 @@ jobs:
run: |
echo "arch=${ARCH,,}" >> $GITHUB_OUTPUT
- - name: Generate tags
- id: meta
- uses: docker/metadata-action@v5
+ - name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
- tags: |
- type=sha,suffix=-${{ steps.lc.outputs.arch }}
-
- - name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
- uses: actions/cache@v4
- with:
- path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ path: |
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
- - name: Retrieve ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} SHA256 tag
+ - name: Pull ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} image
id: base_build
env:
MATRIX_ARCH: ${{ matrix.arch }}
BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
+ BASE_TAG_ID=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag_id")
+ BASE_IMAGE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag")
- echo "::group::Base build image information"
- echo "base_tag=${BASE_TAG}"
- echo "base_build_image=${BUILD_BASE_IMAGE}"
+ echo "::group::Pull image"
+ echo "podman pull dir:\"${IMAGE_DIR}/${BASE_TAG_ID}\""
+ podman pull dir:"${IMAGE_DIR}/${BASE_TAG_ID}"
echo "::endgroup::"
- echo "base_tag=${BASE_TAG}" >> $GITHUB_OUTPUT
- echo "base_build_image=${BUILD_BASE_IMAGE}" >> $GITHUB_OUTPUT
+ echo "::group::Tag image"
+ echo "podman tag \"${BASE_TAG_ID}\" \"${BASE_IMAGE_TAG}\""
+ podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
+ echo "::endgroup::"
- - name: Build Zabbix Build Base
+ echo "::group::SHA256 tag"
+ DIGEST=$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')
+ BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
+
+ echo "base_build_image=${BASE_BUILD_IMAGE}"
+ echo "::endgroup::"
+
+ echo "base_build_image=${BASE_BUILD_IMAGE}" >> $GITHUB_OUTPUT
+
+ - name: Generate tags
+ id: meta
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
+ with:
+ images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
+ tags: |
+ type=sha,suffix=-${{ steps.lc.outputs.arch }}
+
+ - name: Build image
id: build_image
- uses: redhat-actions/buildah-build@v2
+ uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
layers: false
@@ -244,28 +395,84 @@ jobs:
containerfiles: |
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
+ extra-args: |
+ --iidfile=${{ github.workspace }}/iidfile
- - name: Image digest
+ - name: Prepare image metadata
+ id: image_metadata
env:
- IMAGE_TAG: ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+ IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.arch }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
run: |
- DIGEST=$(podman inspect ${IMAGE_TAG} --format "{{ index .RepoDigests 0}}" | cut -d '@' -f2)
- echo "::group::Image digest"
- echo "$DIGEST"
+ TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
+
+ echo "::group::Image tag"
+ echo "image_tag=$IMAGE_TAG"
+ echo "::endgroup::"
+ echo "::group::Image Tag ID"
+ echo "tag_id=$TAG_ID"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$CACHE_FILE_NAME"
echo "::endgroup::"
- echo "$DIGEST" > "$CACHE_FILE_NAME"
+ echo "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
+ echo "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
+
+ echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
+ echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache/save@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ matrix.build }}_${{ matrix.arch }}
+ path: |
+ ${{ matrix.build }}_${{ matrix.arch }}_tag_id
+ ${{ matrix.build }}_${{ matrix.arch }}_tag
key: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
+ - name: Push image to local storage
+ id: push_image
+ env:
+ IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+ echo "podman push \"${IMAGE_TAG}\" dir:\"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
+ podman push "${IMAGE_TAG}" dir:"${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
+ - name: Post build image
+ if: ${{ success() || failure() }}
+ env:
+ GITHUB_WORKSPACE: ${{ github.workspace }}
+ run: |
+ echo "::group::Result"
+
+ rm -rf "$GITHUB_WORKSPACE/iidfile"
+
+ echo "Removing working containers"
+ buildah rm -a 2>/dev/null || true
+ echo "Removing container data in storage not controlled by podman"
+ podman system prune --external 2>/dev/null
+ echo "Removing all unused container data with volumes"
+ podman system prune -a --volumes -f 2>/dev/null
+ echo "Resetting podman storage to default state"
+ podman system reset -f 2>/dev/null || true
+
+ echo "::endgroup::"
+
+ - name: Check on failures
+ if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
+ env:
+ IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
+ run: |
+ echo "::group::Removing orphaned image"
+ rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
+ echo "::endgroup::"
+
build_images:
timeout-minutes: 90
needs: [ "build_base_database", "init_build"]
@@ -274,79 +481,81 @@ jobs:
fail-fast: false
matrix:
build: ${{ fromJson(needs.init_build.outputs.components) }}
- arch: [X64, ARM64]
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
- id-token: write
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
fetch-depth: 1
- - name: Fix string case
- id: lc
+ - name: Variables formatting
+ id: var_format
env:
- ARCH: ${{ matrix.arch }}
+ MATRIX_BUILD: ${{ matrix.build }}
run: |
- echo "arch=${ARCH,,}" >> $GITHUB_OUTPUT
+ MATRIX_BUILD=${MATRIX_BUILD^^}
+ MATRIX_BUILD=${MATRIX_BUILD//-/_}
+
+ echo "::group::Result"
+ echo "matrix_build=${MATRIX_BUILD}"
+ echo "::endgroup::"
+ echo "matrix_build=${MATRIX_BUILD}" >> $GITHUB_OUTPUT
- name: Detect Build Base Image
id: build_base_image
+ if: ${{ matrix.build != 'snmptraps' }}
env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
MATRIX_BUILD: ${{ matrix.build }}
- CURRENT_BRANCH: ${{ needs.init_build.outputs.current_branch }}
+ MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
- BUILD_BASE=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".build_base")
+ BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\".base" "$MATRIX_FILE")
- echo "::group::Build base image"
- echo "build_base=$BUILD_BASE"
+ echo "::group::Base Build Image"
+ echo "$BUILD_BASE"
echo "::endgroup::"
- echo "build_base=$BUILD_BASE" >> $GITHUB_OUTPUT
+ echo "build_base=${BUILD_BASE}" >> $GITHUB_OUTPUT
- - name: Generate image name
- id: image_name
- env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
- MATRIX_BUILD: ${{ matrix.build }}
- CURRENT_BRANCH: ${{ needs.init_build.outputs.current_branch }}
- run: |
- IMAGE_NAME=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".login")
-
- echo "::add-mask::$IMAGE_NAME"
- echo "image_name=$IMAGE_NAME" >> $GITHUB_OUTPUT
-
- - name: Generate credentials
- id: login_credentials
- env:
- REDHAT_CERTIFY_CREDENTIALS: ${{ secrets.REDHAT_CERTIFY_CREDENTIALS }}
- MATRIX_BUILD: ${{ matrix.build }}
- CURRENT_BRANCH: ${{ needs.init_build.outputs.current_branch }}
- run: |
- IMAGE_NAME=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".login")
- REGISTRY_PASSWORD=$(jq --raw-output --argjson data "$REDHAT_CERTIFY_CREDENTIALS" -n "\$data.\"$CURRENT_BRANCH\".components.\"$MATRIX_BUILD\".secret")
-
- echo "::add-mask::$IMAGE_NAME"
- echo "::add-mask::redhat-isv-containers+$IMAGE_NAME-robot"
- echo "::add-mask::$REGISTRY_PASSWORD"
-
- echo "username=$IMAGE_NAME" >> $GITHUB_OUTPUT
- echo "password=$REGISTRY_PASSWORD" >> $GITHUB_OUTPUT
-
- - name: Log in to Quay.io
- uses: redhat-actions/podman-login@v1.6
- if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
- env:
- LOGIN: ${{ steps.login_credentials.outputs.username }}
- PASSWORD: ${{ steps.login_credentials.outputs.password }}
+ - name: Download metadata of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }}
+ if: ${{ matrix.build != 'snmptraps' }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- username: redhat-isv-containers+${{ env.LOGIN }}-robot
- password: ${{ env.PASSWORD }}
- registry: ${{ env.REGISTRY }}
- auth_file_path: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
+ path: |
+ ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}_tag_id
+ ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}_tag
+ key: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.arch }}-${{ github.run_id }}
+
+ - name: Pull ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }} image
+ id: base_build
+ if: ${{ matrix.build != 'snmptraps' }}
+ env:
+ MATRIX_ARCH: ${{ matrix.arch }}
+ BASE_IMAGE: ${{ steps.build_base_image.outputs.build_base }}
+ IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
+ run: |
+ BASE_TAG_ID=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag_id")
+ BASE_IMAGE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag")
+
+ echo "::group::Pull image"
+ echo "podman pull dir:\"${IMAGE_DIR}/${BASE_TAG_ID}\""
+ podman pull dir:"${IMAGE_DIR}/${BASE_TAG_ID}"
+ echo "::endgroup::"
+
+ echo "::group::Tag image"
+ echo "podman tag \"${BASE_TAG_ID}\" \"${BASE_IMAGE_TAG}\""
+ podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
+ echo "::endgroup::"
+
+ echo "::group::SHA256 tag"
+ DIGEST=$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')
+ BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
+ echo "digest=${BASE_BUILD_IMAGE}"
+ echo "::endgroup::"
+
+ echo "base_build_image=${BASE_BUILD_IMAGE}" >> $GITHUB_OUTPUT
- name: Remove smartmontools
if: ${{ matrix.build == 'agent2' }}
@@ -357,9 +566,9 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
- images: ${{ env.REGISTRY }}/${{ env.REGISTRY_NAMESPACE }}/${{ steps.image_name.outputs.image_name }}
+ images: ${{ env.REGISTRY }}/${{ env.REGISTRY_NAMESPACE }}/${{ secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] || matrix.build }}
tags: |
type=semver,pattern={{version}}
type=sha
@@ -367,33 +576,9 @@ jobs:
latest=${{ github.event_name == 'release' }}
suffix=${{ matrix.arch == 'ARM64' && '-arm64' || '' }},onlatest=true
- - name: Download SHA256 tag of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }}
- uses: actions/cache@v4
- with:
- path: ${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}
- key: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.arch }}-${{ github.run_id }}
-
- - name: Retrieve ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }} SHA256 tag
- id: base_build
- env:
- MATRIX_ARCH: ${{ matrix.arch }}
- BASE_IMAGE: ${{ steps.build_base_image.outputs.build_base }}
- IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
- run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
-
- echo "::group::Base build image information"
- echo "base_tag=${BASE_TAG}"
- echo "base_build_image=${BUILD_BASE_IMAGE}"
- echo "::endgroup::"
-
- echo "base_tag=${BASE_TAG}" >> $GITHUB_OUTPUT
- echo "base_build_image=${BUILD_BASE_IMAGE}" >> $GITHUB_OUTPUT
-
- - name: Build ${{ matrix.build }}
+ - name: Build image
id: build_image
- uses: redhat-actions/buildah-build@v2
+ uses: redhat-actions/buildah-build@b4dc19b4ba891854660ab1f88a097d45aa158f76 # v2.12
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel
layers: false
@@ -405,18 +590,27 @@ jobs:
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
build-args: BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
- - name: Push to RedHat certification procedure
+ - name: Log in to ${{ env.REGISTRY }}
+ uses: redhat-actions/podman-login@9184318aae1ee5034fbfbacc0388acf12669171f # v1.6
+ if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
+ with:
+ username: ${{ format('redhat-isv-containers+{0}-robot', secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)]) }}
+ password: ${{ secrets[format('{0}_{1}_SECRET', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
+ registry: ${{ env.REGISTRY }}
+ auth_file_path: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
+
+ - name: Push to RedHat certification procedure (1st)
id: push_to_registry
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
- uses: redhat-actions/push-to-registry@v2
+ uses: redhat-actions/push-to-registry@9986a6552bc4571882a4a67e016b17361412b4df # v2.7.1
with:
tags: ${{ steps.meta.outputs.tags }}
- - name: Preflight
+ - name: Preflight certification
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
env:
PFLT_DOCKERCONFIG: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
- PFLT_CERTIFICATION_PROJECT_ID: ${{ steps.login_credentials.outputs.username }}
+ PFLT_CERTIFICATION_PROJECT_ID: ${{ secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
PFLT_PYXIS_API_TOKEN: ${{ secrets.REDHAT_API_TOKEN }}
PFLT_ARTIFACTS: ${{ env.PFLT_ARTIFACTS }}
PFLT_LOGLEVEL: ${{ env.PFLT_LOGLEVEL }}
@@ -424,7 +618,7 @@ jobs:
PREFLIGHT_IMAGE: ${{ env.PREFLIGHT_IMAGE }}
run: |
mkdir -p $PFLT_ARTIFACTS
- echo "::group::Pull preflight image"
+ echo "::group::Pull preflight \"$PREFLIGHT_IMAGE\" image"
podman pull "$PREFLIGHT_IMAGE"
echo "::endgroup::"
@@ -445,70 +639,91 @@ jobs:
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"
- - name: Push to RedHat certification procedure
+ - name: Push to RedHat certification procedure (all tags)
id: push_to_registry_all_tags
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
- uses: redhat-actions/push-to-registry@v2
+ uses: redhat-actions/push-to-registry@9986a6552bc4571882a4a67e016b17361412b4df # v2.7.1
with:
tags: ${{ steps.meta.outputs.tags }}
- - name: Cleanup artifacts
- if: ${{ always() }}
+ - name: Post Preflight certification
+ if: ${{ env.AUTO_PUSH_IMAGES == 'true' && (success() || failure()) }}
env:
PREFLIGHT_IMAGE: ${{ env.PREFLIGHT_IMAGE }}
PFLT_ARTIFACTS: ${{ env.PFLT_ARTIFACTS }}
- TAGS: ${{ steps.meta.outputs.tags }}
run: |
- echo "::group::Post build actions"
- echo "$TAGS" | while IFS= read -r image_name ; do podman rmi -i -f "$image_name"; done
+ echo "::group::Result"
rm -rf "$PFLT_ARTIFACTS"
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"
- clean_artifacts:
+ - name: Post build image
+ if: ${{ success() || failure() }}
+ run: |
+ echo "::group::Result"
+
+ echo "Removing working containers"
+ buildah rm -a 2>/dev/null || true
+ echo "Removing container data in storage not controlled by podman"
+ podman system prune --external 2>/dev/null
+ echo "Removing all unused container data with volumes"
+ podman system prune -a --volumes -f 2>/dev/null
+ echo "Resetting podman storage to default state"
+ podman system reset -f 2>/dev/null || true
+
+ echo "::endgroup::"
+
+ clear_artifacts:
timeout-minutes: 90
needs: [ "build_images", "init_build"]
- name: Build ${{ matrix.build }} image (${{ matrix.arch }})
+ name: Clear ${{ matrix.build }} image cache (${{ matrix.arch }})
strategy:
fail-fast: false
matrix:
- build: [build-mysql, build-sqlite3]
- arch: [X64, ARM64]
+ build: ${{ fromJson(needs.init_build.outputs.database) }}
+ arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
- if: ${{ always() && needs.build_base_database.result == 'success' }}
+ if: ${{ needs.build_base_database.result == 'success' }}
permissions: {}
steps:
- - name: Download SHA256 tag of ${{ matrix.build }}:${{ matrix.arch }}
- uses: actions/cache@v4
+ - name: Download metadata of ${{ matrix.build }}:${{ matrix.arch }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ matrix.build }}_${{ matrix.arch }}
+ path: |
+ ${{ matrix.build }}_${{ matrix.arch }}_tag_id
+ ${{ matrix.build }}_${{ matrix.arch }}_tag
key: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
- - name: Remove ${{ matrix.build }}:${{ matrix.arch }} SHA256 tag
+ - name: Remove ${{ matrix.build }}:${{ matrix.arch }} cache
env:
- MATRIX_ARCH: ${{ matrix.arch }}
- BASE_IMAGE: ${{ matrix.build }}
- IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
+ CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.arch }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
+ echo "::group::Result"
- podman rmi -i -f "$BUILD_BASE_IMAGE"
+ BASE_TAG=$(cat "${CACHE_FILE_NAME}_tag_id")
+ echo "Removing ${IMAGE_DIR}/${BASE_TAG}"
+ rm -rf "${IMAGE_DIR}/${BASE_TAG}"
- - name: Download SHA256 tag of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
- uses: actions/cache@v4
+ echo "::endgroup::"
+
+ - name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
+ uses: actions/cache/restore@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
- path: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ path: |
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
+ ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
-
- - name: Remove ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} SHA256 tag
+ - name: Remove ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} cache
env:
- MATRIX_ARCH: ${{ matrix.arch }}
- BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
- IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
+ CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
+ IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
- BASE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}")
- BUILD_BASE_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${BASE_TAG}"
+ echo "::group::Result"
- podman rmi -i -f "$BUILD_BASE_IMAGE"
+ BASE_TAG=$(cat "${CACHE_FILE_NAME}_tag_id")
+ echo "Removing ${IMAGE_DIR}/${BASE_TAG}"
+ rm -rf "${IMAGE_DIR}/${BASE_TAG}"
+
+ echo "::endgroup::"
diff --git a/.github/workflows/images_build_windows.yml b/.github/workflows/images_build_windows.yml
index 7da254980..154f26431 100644
--- a/.github/workflows/images_build_windows.yml
+++ b/.github/workflows/images_build_windows.yml
@@ -67,7 +67,7 @@ jobs:
github.com:443
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -155,7 +155,7 @@ jobs:
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -169,7 +169,7 @@ jobs:
run: cosign version
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -190,7 +190,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_IMAGE_NAME }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -324,7 +324,7 @@ jobs:
$Env:DIGEST | Set-Content -Path $Env:CACHE_FILE_NAME
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -344,7 +344,7 @@ jobs:
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -358,7 +358,7 @@ jobs:
run: cosign version
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -379,7 +379,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_BUILD_IMAGE_NAME }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -395,7 +395,7 @@ jobs:
latest=false
- name: Download SHA256 tag of ${{ env.BASE_IMAGE_NAME }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -551,7 +551,7 @@ jobs:
$Env:DIGEST | Set-Content -Path $Env:CACHE_FILE_NAME
- name: Cache image digest
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
@@ -571,7 +571,7 @@ jobs:
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
- uses: actions/checkout@v4
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
@@ -585,7 +585,7 @@ jobs:
run: cosign version
- name: Login to DockerHub
- uses: docker/login-action@v3
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -606,7 +606,7 @@ jobs:
- name: Generate tags
id: meta
- uses: docker/metadata-action@v5
+ uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.component }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
@@ -622,7 +622,7 @@ jobs:
latest=false
- name: Download SHA256 tag of ${{ env.BASE_BUILD_IMAGE_NAME }}:${{ matrix.os }}
- uses: actions/cache@v4
+ uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with:
path: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 08faac455..3aa8ef685 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -32,13 +32,18 @@ jobs:
# actions: read
steps:
+ - name: Harden Runner
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ egress-policy: audit
+
- name: "Checkout code"
- uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
persist-credentials: false
- name: "Run analysis"
- uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2
+ uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with:
results_file: results.sarif
results_format: sarif
@@ -60,7 +65,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
+ uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: SARIF file
path: results.sarif
@@ -68,6 +73,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
- uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
+ uses: github/codeql-action/upload-sarif@e675ced7a7522a761fc9c8eb26682c8b27c42b2b # v3.24.1
with:
sarif_file: results.sarif
diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml
new file mode 100644
index 000000000..2fa6f8be5
--- /dev/null
+++ b/.github/workflows/sonarcloud.yml
@@ -0,0 +1,79 @@
+# This workflow uses actions that are not certified by GitHub.
+# They are provided by a third-party and are governed by
+# separate terms of service, privacy policy, and support
+# documentation.
+
+# This workflow helps you trigger a SonarCloud analysis of your code and populates
+# GitHub Code Scanning alerts with the vulnerabilities found.
+# Free for open source project.
+
+# 1. Login to SonarCloud.io using your GitHub account
+
+# 2. Import your project on SonarCloud
+# * Add your GitHub organization first, then add your repository as a new project.
+# * Please note that many languages are eligible for automatic analysis,
+# which means that the analysis will start automatically without the need to set up GitHub Actions.
+# * This behavior can be changed in Administration > Analysis Method.
+#
+# 3. Follow the SonarCloud in-product tutorial
+# * a. Copy/paste the Project Key and the Organization Key into the args parameter below
+# (You'll find this information in SonarCloud. Click on "Information" at the bottom left)
+#
+# * b. Generate a new token and add it to your Github repository's secrets using the name SONAR_TOKEN
+# (On SonarCloud, click on your avatar on top-right > My account > Security
+# or go directly to https://sonarcloud.io/account/security/)
+
+# Feel free to take a look at our documentation (https://docs.sonarcloud.io/getting-started/github/)
+# or reach out to our community forum if you need some help (https://community.sonarsource.com/c/help/sc/9)
+
+name: SonarCloud analysis
+
+on:
+ push:
+ branches: [ "6.4" ]
+ pull_request:
+ branches: [ "6.4" ]
+ workflow_dispatch:
+
+permissions:
+ pull-requests: read # allows SonarCloud to decorate PRs with analysis results
+
+jobs:
+ Analysis:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Block egress traffic
+ uses: step-security/harden-runner@63c24ba6bd7ba022e95695ff85de572c04a18142 # v2.7.0
+ with:
+ egress-policy: audit
+
+ - name: Checkout repository
+ uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ with:
+ # Disabling shallow clone is recommended for improving relevancy of reporting
+ fetch-depth: 0
+
+ - name: Analyze with SonarCloud
+
+ # You can pin the exact commit or the version.
+ # uses: SonarSource/sonarcloud-github-action@49e6cd3b187936a73b8280d59ffd9da69df63ec9
+ uses: SonarSource/sonarcloud-github-action@49e6cd3b187936a73b8280d59ffd9da69df63ec9 # v2.1.1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} # Generate a token on Sonarcloud.io, add it to the secrets of this repo with the name SONAR_TOKEN (Settings > Secrets > Actions > add new repository secret)
+ with:
+ # Additional arguments for the sonarcloud scanner
+ args:
+ # Unique keys of your project and organization. You can find them in SonarCloud > Information (bottom-left menu)
+ # mandatory
+ -Dsonar.projectKey=zabbix_zabbix-docker
+ -Dsonar.organization=zabbix
+ # Comma-separated paths to directories containing main source files.
+ #-Dsonar.sources= # optional, default is project base directory
+ # When you need the analysis to take place in a directory other than the one from which it was launched
+ #-Dsonar.projectBaseDir= # optional, default is .
+ # Comma-separated paths to directories containing test source files.
+ #-Dsonar.tests= # optional. For more info about Code Coverage, please refer to https://docs.sonarcloud.io/enriching/test-coverage/overview/
+ # Adds more detail to both client and server-side analysis logs, activating DEBUG mode for the scanner, and adding client-side environment variables and system properties to the server-side log of analysis report processing.
+ #-Dsonar.verbose= # optional, default is false
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..74989927d
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,41 @@
+repos:
+- repo: https://github.com/gitleaks/gitleaks
+ rev: v8.16.3
+ hooks:
+ - id: gitleaks
+- repo: https://github.com/jumanjihouse/pre-commit-hooks
+ rev: 3.0.0
+ hooks:
+ - id: shellcheck
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: end-of-file-fixer
+ exclude: |
+ (?x)(
+ ^env_vars/\.MYSQL |
+ ^env_vars/\.POSTGRES
+ )
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: trailing-whitespace
+ exclude: |
+ (?x)(
+ .*\.patch$
+ )
+ - id: check-yaml
+ args: [--allow-multiple-documents]
+ - id: check-symlinks
+# - id: pretty-format-json
+ - id: check-added-large-files
+#- repo: https://github.com/adrienverge/yamllint.git
+# rev: v1.21.0 # or higher tag
+# hooks:
+# - id: yamllint
+# args: [--format, parsable, --strict]
+#- repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt
+# rev: 0.2.3
+# hooks:
+# - id: yamlfmt
+# args: [--mapping, '2', --sequence, '1', --offset, '0', --colons, --width, '400']
diff --git a/Dockerfiles/agent/rhel/Dockerfile b/Dockerfiles/agent/rhel/Dockerfile
index 94ba272a8..9d2aecece 100644
--- a/Dockerfiles/agent/rhel/Dockerfile
+++ b/Dockerfiles/agent/rhel/Dockerfile
@@ -58,7 +58,7 @@ RUN set -eux && \
shadow-utils \
pcre2 \
libcurl" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y install \
diff --git a/Dockerfiles/agent/windows/docker-entrypoint.ps1 b/Dockerfiles/agent/windows/docker-entrypoint.ps1
index aaef8c29c..eaea6e08c 100644
--- a/Dockerfiles/agent/windows/docker-entrypoint.ps1
+++ b/Dockerfiles/agent/windows/docker-entrypoint.ps1
@@ -40,7 +40,7 @@ function Update-Config-Var {
if (-not(Test-Path -Path $ConfigPath -PathType Leaf)) {
throw "**** Configuration file '$ConfigPath' does not exist"
}
-
+
if ($MaskList.Contains($VarName) -eq $true -And [string]::IsNullOrWhitespace($VarValue) -ne $true) {
Write-Host -NoNewline "** Updating '$ConfigPath' parameter ""$VarName"": '****'. Enable DEBUG_MODE to view value ..."
}
@@ -50,12 +50,12 @@ function Update-Config-Var {
if ([string]::IsNullOrWhitespace($VarValue)) {
if ((Get-Content $ConfigPath | %{$_ -match "^$VarName="}) -contains $true) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Where-Object {$_ -notmatch "^$VarName=" } |
Set-Content $ConfigPath
}
- Write-Host "removed"
+ Write-Host "removed"
return
}
@@ -64,7 +64,7 @@ function Update-Config-Var {
Write-Host "undefined"
return
}
-
+
if ($VarName -match '^TLS.*File$') {
$VarValue="$ZabbixUserHomeDir\enc\$VarValue"
}
@@ -75,7 +75,7 @@ function Update-Config-Var {
Write-Host updated
}
elseif ((Get-Content $ConfigPath | select-string -pattern "^[#;] $VarName=").length -gt 0) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Foreach-Object {
$_
if ($_ -match "^[#;] $VarName=") {
diff --git a/Dockerfiles/agent2/rhel/Dockerfile b/Dockerfiles/agent2/rhel/Dockerfile
index cb997b91b..d919334ee 100644
--- a/Dockerfiles/agent2/rhel/Dockerfile
+++ b/Dockerfiles/agent2/rhel/Dockerfile
@@ -65,7 +65,7 @@ RUN set -eux && \
smartmontools \
sudo \
libcurl" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y install \
diff --git a/Dockerfiles/agent2/rhel/licenses/apache-2.0.txt b/Dockerfiles/agent2/rhel/licenses/apache-2.0.txt
index 39f57220f..9006b97b3 100644
--- a/Dockerfiles/agent2/rhel/licenses/apache-2.0.txt
+++ b/Dockerfiles/agent2/rhel/licenses/apache-2.0.txt
@@ -199,4 +199,4 @@ Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
-limitations under the License.
\ No newline at end of file
+limitations under the License.
diff --git a/Dockerfiles/agent2/windows/docker-entrypoint.ps1 b/Dockerfiles/agent2/windows/docker-entrypoint.ps1
index 3b6753055..415c0f6e9 100644
--- a/Dockerfiles/agent2/windows/docker-entrypoint.ps1
+++ b/Dockerfiles/agent2/windows/docker-entrypoint.ps1
@@ -40,7 +40,7 @@ function Update-Config-Var {
if (-not(Test-Path -Path $ConfigPath -PathType Leaf)) {
throw "**** Configuration file '$ConfigPath' does not exist"
}
-
+
if ($MaskList.Contains($VarName) -eq $true -And [string]::IsNullOrWhitespace($VarValue) -ne $true) {
Write-Host -NoNewline "** Updating '$ConfigPath' parameter ""$VarName"": '****'. Enable DEBUG_MODE to view value ..."
}
@@ -50,12 +50,12 @@ function Update-Config-Var {
if ([string]::IsNullOrWhitespace($VarValue)) {
if ((Get-Content $ConfigPath | %{$_ -match "^$VarName="}) -contains $true) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Where-Object {$_ -notmatch "^$VarName=" } |
Set-Content $ConfigPath
}
- Write-Host "removed"
+ Write-Host "removed"
return
}
@@ -64,7 +64,7 @@ function Update-Config-Var {
Write-Host "undefined"
return
}
-
+
if ($VarName -match '^TLS.*File$') {
$VarValue="$ZabbixUserHomeDir\enc\$VarValue"
}
@@ -75,7 +75,7 @@ function Update-Config-Var {
Write-Host updated
}
elseif ((Get-Content $ConfigPath | select-string -pattern "^[#;] $VarName=").length -gt 0) {
- (Get-Content $ConfigPath) |
+ (Get-Content $ConfigPath) |
Foreach-Object {
$_
if ($_ -match "^[#;] $VarName=") {
@@ -163,7 +163,6 @@ function Prepare-Zbx-Agent-Config {
else {
Update-Config-Var $ZbxAgentConfig "ServerActive"
}
-
Update-Config-Var $ZbxAgentConfig "ForceActiveChecksOnStart" "$env:ZBX_FORCEACTIVECHECKSONSTART"
if ([string]::IsNullOrWhitespace($env:ZBX_ENABLEPERSISTENTBUFFER)) {
diff --git a/Dockerfiles/build-base/windows/modbus.vs16.sln b/Dockerfiles/build-base/windows/modbus.vs16.sln
index 19ce76466..5e593bcee 100644
--- a/Dockerfiles/build-base/windows/modbus.vs16.sln
+++ b/Dockerfiles/build-base/windows/modbus.vs16.sln
@@ -28,4 +28,4 @@ Global
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {6CF51673-664F-4C9F-B3FE-991FF423F3B6}
EndGlobalSection
-EndGlobal
\ No newline at end of file
+EndGlobal
diff --git a/Dockerfiles/build-base/windows/modbus.vs16.vcxproj b/Dockerfiles/build-base/windows/modbus.vs16.vcxproj
index 9cf530ccf..e1d48e03c 100644
--- a/Dockerfiles/build-base/windows/modbus.vs16.vcxproj
+++ b/Dockerfiles/build-base/windows/modbus.vs16.vcxproj
@@ -262,4 +262,4 @@
-
\ No newline at end of file
+
diff --git a/Dockerfiles/build-base/windows/modbus.vs16.vcxproj.filters b/Dockerfiles/build-base/windows/modbus.vs16.vcxproj.filters
index 819f877e5..19d40f654 100644
--- a/Dockerfiles/build-base/windows/modbus.vs16.vcxproj.filters
+++ b/Dockerfiles/build-base/windows/modbus.vs16.vcxproj.filters
@@ -59,4 +59,4 @@
Resource Files
-
\ No newline at end of file
+
diff --git a/Dockerfiles/build-mysql/alpine/Dockerfile b/Dockerfiles/build-mysql/alpine/Dockerfile
index 2069a3c3d..d3db930d7 100644
--- a/Dockerfiles/build-mysql/alpine/Dockerfile
+++ b/Dockerfiles/build-mysql/alpine/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/centos/Dockerfile b/Dockerfiles/build-mysql/centos/Dockerfile
index 3de696b9e..d8cd93c79 100644
--- a/Dockerfiles/build-mysql/centos/Dockerfile
+++ b/Dockerfiles/build-mysql/centos/Dockerfile
@@ -73,7 +73,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/ol/Dockerfile b/Dockerfiles/build-mysql/ol/Dockerfile
index a0a18c9f7..5ae220b2f 100644
--- a/Dockerfiles/build-mysql/ol/Dockerfile
+++ b/Dockerfiles/build-mysql/ol/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/rhel/Dockerfile b/Dockerfiles/build-mysql/rhel/Dockerfile
index 6aa1edba1..ad883f5a8 100644
--- a/Dockerfiles/build-mysql/rhel/Dockerfile
+++ b/Dockerfiles/build-mysql/rhel/Dockerfile
@@ -82,7 +82,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-mysql/ubuntu/Dockerfile b/Dockerfiles/build-mysql/ubuntu/Dockerfile
index a83fabf7f..29da90e9b 100644
--- a/Dockerfiles/build-mysql/ubuntu/Dockerfile
+++ b/Dockerfiles/build-mysql/ubuntu/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/mysql/create.sql > database/mysql/create_proxy.sql.gz && \
rm -rf database/mysql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/alpine/Dockerfile b/Dockerfiles/build-pgsql/alpine/Dockerfile
index 45ba9b1a5..703f96d83 100644
--- a/Dockerfiles/build-pgsql/alpine/Dockerfile
+++ b/Dockerfiles/build-pgsql/alpine/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/centos/Dockerfile b/Dockerfiles/build-pgsql/centos/Dockerfile
index 0b8754c16..f3f07ed05 100644
--- a/Dockerfiles/build-pgsql/centos/Dockerfile
+++ b/Dockerfiles/build-pgsql/centos/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/ol/Dockerfile b/Dockerfiles/build-pgsql/ol/Dockerfile
index c4f23982f..94e2a68aa 100644
--- a/Dockerfiles/build-pgsql/ol/Dockerfile
+++ b/Dockerfiles/build-pgsql/ol/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-pgsql/ubuntu/Dockerfile b/Dockerfiles/build-pgsql/ubuntu/Dockerfile
index 7cee56822..a25623053 100644
--- a/Dockerfiles/build-pgsql/ubuntu/Dockerfile
+++ b/Dockerfiles/build-pgsql/ubuntu/Dockerfile
@@ -72,7 +72,7 @@ RUN set -eux && \
gzip -c database/postgresql/create.sql > database/postgresql/create_proxy.sql.gz && \
rm -rf database/postgresql/create.sql && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/alpine/Dockerfile b/Dockerfiles/build-sqlite3/alpine/Dockerfile
index 36bf45360..ce52af7a7 100644
--- a/Dockerfiles/build-sqlite3/alpine/Dockerfile
+++ b/Dockerfiles/build-sqlite3/alpine/Dockerfile
@@ -58,7 +58,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/centos/Dockerfile b/Dockerfiles/build-sqlite3/centos/Dockerfile
index 5f3328f6e..13636dac5 100644
--- a/Dockerfiles/build-sqlite3/centos/Dockerfile
+++ b/Dockerfiles/build-sqlite3/centos/Dockerfile
@@ -58,7 +58,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/ol/Dockerfile b/Dockerfiles/build-sqlite3/ol/Dockerfile
index 2d35c7404..8616db809 100644
--- a/Dockerfiles/build-sqlite3/ol/Dockerfile
+++ b/Dockerfiles/build-sqlite3/ol/Dockerfile
@@ -58,7 +58,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/rhel/Dockerfile b/Dockerfiles/build-sqlite3/rhel/Dockerfile
index fabba280f..816c16d9a 100644
--- a/Dockerfiles/build-sqlite3/rhel/Dockerfile
+++ b/Dockerfiles/build-sqlite3/rhel/Dockerfile
@@ -68,7 +68,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/build-sqlite3/ubuntu/Dockerfile b/Dockerfiles/build-sqlite3/ubuntu/Dockerfile
index b1bc8cc17..c807b8969 100644
--- a/Dockerfiles/build-sqlite3/ubuntu/Dockerfile
+++ b/Dockerfiles/build-sqlite3/ubuntu/Dockerfile
@@ -58,7 +58,7 @@ RUN set -eux && \
make -j"$(nproc)" -s dbschema && \
make -j"$(nproc)" -s && \
mkdir /tmp/fonts/ && \
- curl --silent -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
+ curl --tlsv1.2 -sSf -L "https://noto-website.storage.googleapis.com/pkgs/NotoSansCJKjp-hinted.zip" -o /tmp/fonts/NotoSansCJKjp-hinted.zip && \
unzip /tmp/fonts/NotoSansCJKjp-hinted.zip -d /tmp/fonts/ && \
cp /tmp/fonts/NotoSansCJKjp-Regular.otf /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/NotoSansCJKjp-Regular.ttf && \
cp /tmp/fonts/LICENSE_OFL.txt /tmp/zabbix-${ZBX_VERSION}/ui/assets/fonts/ && \
diff --git a/Dockerfiles/proxy-mysql/README.md b/Dockerfiles/proxy-mysql/README.md
index f95682e96..a9abfc711 100644
--- a/Dockerfiles/proxy-mysql/README.md
+++ b/Dockerfiles/proxy-mysql/README.md
@@ -113,7 +113,7 @@ This variable is port Zabbix server listening on. By default, value is `10051`.
This variable is IP or DNS name of MySQL server. By default, value is 'mysql-server'
### `DB_SERVER_PORT`
-
+
This variable is port of MySQL server. By default, value is '3306'.
### `MYSQL_USER`, `MYSQL_PASSWORD`, `MYSQL_USER_FILE`, `MYSQL_PASSWORD_FILE`
diff --git a/Dockerfiles/proxy-mysql/rhel/Dockerfile b/Dockerfiles/proxy-mysql/rhel/Dockerfile
index 2eedd3746..d817eb708 100644
--- a/Dockerfiles/proxy-mysql/rhel/Dockerfile
+++ b/Dockerfiles/proxy-mysql/rhel/Dockerfile
@@ -73,7 +73,7 @@ RUN set -eux && \
pcre2 \
gzip \
unixODBC" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y module enable mysql && \
diff --git a/Dockerfiles/proxy-sqlite3/rhel/Dockerfile b/Dockerfiles/proxy-sqlite3/rhel/Dockerfile
index 4e564fe20..1098458cc 100644
--- a/Dockerfiles/proxy-sqlite3/rhel/Dockerfile
+++ b/Dockerfiles/proxy-sqlite3/rhel/Dockerfile
@@ -70,7 +70,7 @@ RUN set -eux && \
pcre2 \
sqlite-libs \
unixODBC" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y install \
diff --git a/Dockerfiles/server-mysql/rhel/Dockerfile b/Dockerfiles/server-mysql/rhel/Dockerfile
index 51f8f822b..f7483c86f 100644
--- a/Dockerfiles/server-mysql/rhel/Dockerfile
+++ b/Dockerfiles/server-mysql/rhel/Dockerfile
@@ -75,7 +75,7 @@ RUN set -eux && \
pcre2 \
gzip \
unixODBC" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y module enable mysql && \
diff --git a/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh b/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh
index 3e2bb7c0d..d1d3f1813 100755
--- a/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/alpine/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh b/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh
index 3e2bb7c0d..d1d3f1813 100755
--- a/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/centos/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh b/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh
index 3e2bb7c0d..d1d3f1813 100755
--- a/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/ol/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh b/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh
index 73f8ae46f..965c26596 100755
--- a/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh
+++ b/Dockerfiles/server-pgsql/ubuntu/docker-entrypoint.sh
@@ -331,7 +331,7 @@ apply_db_scripts() {
}
create_db_schema_postgresql() {
- DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
+ DBVERSION_TABLE_EXISTS=$(psql_query "SELECT 1 FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid =
c.relnamespace WHERE n.nspname = '$DB_SERVER_SCHEMA' AND c.relname = 'dbversion'" "${DB_SERVER_DBNAME}")
if [ -n "${DBVERSION_TABLE_EXISTS}" ]; then
diff --git a/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-mysql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-mysql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile b/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile
index 8e8223ad3..3665501d6 100644
--- a/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile
+++ b/Dockerfiles/web-apache-mysql/ubuntu/Dockerfile
@@ -104,7 +104,7 @@ RUN set -eux && \
rm -rf /var/lib/apt/lists/*
EXPOSE 8080/TCP 8443/TCP
-
+
WORKDIR /usr/share/zabbix
COPY ["docker-entrypoint.sh", "/usr/bin/"]
diff --git a/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-pgsql/centos/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf b/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
index 7b1080149..c28b761b9 100644
--- a/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
+++ b/Dockerfiles/web-apache-pgsql/ol/conf/etc/httpd/conf.d/99-zabbix.conf
@@ -1,3 +1,3 @@
PidFile "/tmp/httpd.pid"
-
\ No newline at end of file
+
diff --git a/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-apache-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/rhel/Dockerfile b/Dockerfiles/web-nginx-mysql/rhel/Dockerfile
index cb5740346..9233007fa 100644
--- a/Dockerfiles/web-nginx-mysql/rhel/Dockerfile
+++ b/Dockerfiles/web-nginx-mysql/rhel/Dockerfile
@@ -66,7 +66,7 @@ RUN set -eux && \
php-mbstring \
php-mysqlnd \
php-xml" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
microdnf -y module enable mysql && \
diff --git a/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/rhel/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-mysql/ubuntu/Dockerfile b/Dockerfiles/web-nginx-mysql/ubuntu/Dockerfile
index 7de06a56a..86f59f843 100644
--- a/Dockerfiles/web-nginx-mysql/ubuntu/Dockerfile
+++ b/Dockerfiles/web-nginx-mysql/ubuntu/Dockerfile
@@ -59,7 +59,7 @@ RUN set -eux && \
curl https://nginx.org/keys/nginx_signing.key | gpg --dearmor > /etc/apt/trusted.gpg.d/nginx.gpg && \
gpg --dry-run --quiet --import --import-options import-show /etc/apt/trusted.gpg.d/nginx.gpg && \
DISTRIB_CODENAME=$(/bin/bash -c 'source /etc/lsb-release && echo $DISTRIB_CODENAME') && \
- echo "deb http://nginx.org/packages/ubuntu $DISTRIB_CODENAME nginx" > /etc/apt/sources.list.d/nginx.list && \
+ echo "deb https://nginx.org/packages/ubuntu $DISTRIB_CODENAME nginx" > /etc/apt/sources.list.d/nginx.list && \
echo "Package: *\nPin: origin nginx.org\nPin: release o=nginx\nPin-Priority: 900\n" \
> /etc/apt/preferences.d/99nginx && \
gpgconf --kill all && \
diff --git a/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-mysql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/alpine/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/centos/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/ol/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-nginx-pgsql/ubuntu/Dockerfile b/Dockerfiles/web-nginx-pgsql/ubuntu/Dockerfile
index d9584a74e..3a2fc9e71 100644
--- a/Dockerfiles/web-nginx-pgsql/ubuntu/Dockerfile
+++ b/Dockerfiles/web-nginx-pgsql/ubuntu/Dockerfile
@@ -59,7 +59,7 @@ RUN set -eux && \
curl https://nginx.org/keys/nginx_signing.key | gpg --dearmor > /etc/apt/trusted.gpg.d/nginx.gpg && \
gpg --dry-run --quiet --import --import-options import-show /etc/apt/trusted.gpg.d/nginx.gpg && \
DISTRIB_CODENAME=$(/bin/bash -c 'source /etc/lsb-release && echo $DISTRIB_CODENAME') && \
- echo "deb http://nginx.org/packages/ubuntu $DISTRIB_CODENAME nginx" > /etc/apt/sources.list.d/nginx.list && \
+ echo "deb https://nginx.org/packages/ubuntu $DISTRIB_CODENAME nginx" > /etc/apt/sources.list.d/nginx.list && \
echo "Package: *\nPin: origin nginx.org\nPin: release o=nginx\nPin-Priority: 900\n" \
> /etc/apt/preferences.d/99nginx && \
gpgconf --kill all && \
diff --git a/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php b/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
index efa8ac849..1b1ed63b5 100644
--- a/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
+++ b/Dockerfiles/web-nginx-pgsql/ubuntu/conf/etc/zabbix/web/zabbix.conf.php
@@ -19,25 +19,25 @@ if (getenv('ZBX_SERVER_HOST')) {
$ZBX_SERVER_NAME = getenv('ZBX_SERVER_NAME');
// Used for TLS connection.
-$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
-$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
-$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
-$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
-$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
-$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
+$DB['ENCRYPTION'] = getenv('ZBX_DB_ENCRYPTION') == 'true' ? true: false;
+$DB['KEY_FILE'] = getenv('ZBX_DB_KEY_FILE');
+$DB['CERT_FILE'] = getenv('ZBX_DB_CERT_FILE');
+$DB['CA_FILE'] = getenv('ZBX_DB_CA_FILE');
+$DB['VERIFY_HOST'] = getenv('ZBX_DB_VERIFY_HOST') == 'true' ? true: false;
+$DB['CIPHER_LIST'] = getenv('ZBX_DB_CIPHER_LIST') ? getenv('ZBX_DB_CIPHER_LIST') : '';
// Vault configuration. Used if database credentials are stored in Vault secrets manager.
-$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
-$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
-$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
+$DB['VAULT_URL'] = getenv('ZBX_VAULTURL');
+$DB['VAULT_DB_PATH'] = getenv('ZBX_VAULTDBPATH');
+$DB['VAULT_TOKEN'] = getenv('VAULT_TOKEN');
// Use IEEE754 compatible value range for 64-bit Numeric (float) history values.
// This option is enabled by default for new Zabbix installations.
// For upgraded installations, please read database upgrade notes before enabling this option.
-$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
+$DB['DOUBLE_IEEE754'] = getenv('DB_DOUBLE_IEEE754') == 'true' ? true: false;
-$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
+$IMAGE_FORMAT_DEFAULT = IMAGE_FORMAT_PNG;
// Elasticsearch url (can be string if same url is used for all types).
$history_url = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGEURL'));
@@ -48,9 +48,35 @@ $storage_types = str_replace("'","\"",getenv('ZBX_HISTORYSTORAGETYPES'));
$HISTORY['types'] = (json_decode($storage_types)) ? json_decode($storage_types, true) : array();
// Used for SAML authentication.
-$SSO['SP_KEY'] = file_exists('/etc/zabbix/web/certs/sp.key') ? '/etc/zabbix/web/certs/sp.key' : (file_exists(getenv('ZBX_SSO_SP_KEY')) ? getenv('ZBX_SSO_SP_KEY') : '');
-$SSO['SP_CERT'] = file_exists('/etc/zabbix/web/certs/sp.crt') ? '/etc/zabbix/web/certs/sp.crt' : (file_exists(getenv('ZBX_SSO_SP_CERT')) ? getenv('ZBX_SSO_SP_CERT') : '');
-$SSO['IDP_CERT'] = file_exists('/etc/zabbix/web/certs/idp.crt') ? '/etc/zabbix/web/certs/idp.crt' : (file_exists(getenv('ZBX_SSO_IDP_CERT')) ? getenv('ZBX_SSO_IDP_CERT') : '');
+if (file_exists('/etc/zabbix/web/certs/sp.key')) {
+ $SSO['SP_KEY'] = '/etc/zabbix/web/certs/sp.key';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_KEY'))) {
+ $SSO['SP_KEY'] = getenv('ZBX_SSO_SP_KEY');
+}
+else {
+ $SSO['SP_KEY'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/sp.crt')) {
+ $SSO['SP_CERT'] = '/etc/zabbix/web/certs/sp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_SP_CERT'))) {
+ $SSO['SP_CERT'] = getenv('ZBX_SSO_SP_CERT');
+}
+else {
+ $SSO['SP_CERT'] = '';
+}
+
+if (file_exists('/etc/zabbix/web/certs/idp.crt')) {
+ $SSO['IDP_CERT'] = '/etc/zabbix/web/certs/idp.crt';
+}
+elseif (file_exists(getenv('ZBX_SSO_IDP_CERT'))) {
+ $SSO['IDP_CERT'] = getenv('ZBX_SSO_IDP_CERT');
+}
+else {
+ $SSO['IDP_CERT'] = '';
+}
$sso_settings = str_replace("'","\"",getenv('ZBX_SSO_SETTINGS'));
-$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
+$SSO['SETTINGS'] = (json_decode($sso_settings)) ? json_decode($sso_settings, true) : array();
diff --git a/Dockerfiles/web-service/README.md b/Dockerfiles/web-service/README.md
index 2f5be9784..b675422b3 100644
--- a/Dockerfiles/web-service/README.md
+++ b/Dockerfiles/web-service/README.md
@@ -103,7 +103,7 @@ Please use official documentation for [``zabbix_web_service.conf``](https://www.
## Allowed volumes for the Zabbix web service container
### ``/var/lib/zabbix/enc``
-
+
The volume is used to store TLS related files. These file names are specified using ``ZBX_TLSCAFILE``, ``ZBX_TLSCERTFILE`` and ``ZBX_TLSKEY_FILE`` variables.
# The image variants
diff --git a/Dockerfiles/web-service/rhel/Dockerfile b/Dockerfiles/web-service/rhel/Dockerfile
index 7e89ad29c..068dc6156 100644
--- a/Dockerfiles/web-service/rhel/Dockerfile
+++ b/Dockerfiles/web-service/rhel/Dockerfile
@@ -1,12 +1,12 @@
# syntax=docker/dockerfile:1
-ARG MAJOR_VERSION=6.0
-ARG RELEASE=26
-ARG ZBX_VERSION=${MAJOR_VERSION}.26
+ARG MAJOR_VERSION=7.0
+ARG RELEASE=0
+ARG ZBX_VERSION=${MAJOR_VERSION}
ARG BUILD_BASE_IMAGE=zabbix-build-mysql:rhel-${ZBX_VERSION}
-FROM ${BUILD_BASE_IMAGE} as builder
+FROM ${BUILD_BASE_IMAGE} AS builder
-FROM registry.access.redhat.com/ubi8/ubi-minimal
+FROM registry.access.redhat.com/ubi9/ubi-minimal:9.3
ARG MAJOR_VERSION
ARG RELEASE
@@ -19,9 +19,9 @@ ENV TERM=xterm \
LABEL description="Zabbix web service for performing various tasks using headless web browser" \
maintainer="alexey.pustovalov@zabbix.com" \
- name="zabbix/zabbix-web-service-60" \
+ name="zabbix/zabbix-web-service-trunk" \
release="${RELEASE}" \
- run="docker run --name zabbix-web-service --link zabbix-server:zabbix-server -p 10053:10053 -d registry.connect.redhat.com/zabbix/zabbix-web-service-60:${ZBX_VERSION}" \
+ run="docker run --name zabbix-web-service --link zabbix-server:zabbix-server -p 10053:10053 -d registry.connect.redhat.com/zabbix/zabbix-web-service-trunk:${ZBX_VERSION}" \
summary="Zabbix web service" \
url="https://www.zabbix.com/" \
vendor="Zabbix LLC" \
@@ -32,7 +32,7 @@ LABEL description="Zabbix web service for performing various tasks using headles
io.openshift.tags="zabbix,zabbix-web-service" \
org.label-schema.build-date="${BUILD_DATE}" \
org.label-schema.description="Zabbix web service for performing various tasks using headless web browser" \
- org.label-schema.docker.cmd="docker run --name zabbix-web-service --link zabbix-server:zabbix-server -p 10053:10053 -d registry.connect.redhat.com/zabbix/zabbix-web-service-60:${ZBX_VERSION}" \
+ org.label-schema.docker.cmd="docker run --name zabbix-web-service --link zabbix-server:zabbix-server -p 10053:10053 -d registry.connect.redhat.com/zabbix/zabbix-web-service-trunk:${ZBX_VERSION}" \
org.label-schema.license="GPL v2.0" \
org.label-schema.name="zabbix-web-service-rhel" \
org.label-schema.schema-version="1.0" \
@@ -53,15 +53,16 @@ RUN set -eux && \
INSTALL_PKGS="bash \
shadow-utils \
chromium-headless" && \
- curl -sSL -o /tmp/epel-release-latest-8.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
- rpm -ivh /tmp/epel-release-latest-8.noarch.rpm && \
- rm -rf /tmp/epel-release-latest-8.noarch.rpm && \
+ curl --tlsv1.2 -sSf -L -o /tmp/epel-release-latest-9.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm && \
+ rpm -ivh /tmp/epel-release-latest-9.noarch.rpm && \
+ rm -rf /tmp/epel-release-latest-9.noarch.rpm && \
+ ARCH_SUFFIX="$(arch)"; \
microdnf -y install \
--disablerepo "*" \
- --enablerepo "ubi-8-baseos-rpms" \
- --enablerepo "ubi-8-appstream-rpms" \
- --enablerepo "rhel-8-for-x86_64-baseos-rpms" \
- --enablerepo "rhel-8-for-x86_64-appstream-rpms" \
+ --enablerepo "ubi-9-baseos-rpms" \
+ --enablerepo "ubi-9-appstream-rpms" \
+ --enablerepo "rhel-9-for-$ARCH_SUFFIX-baseos-rpms" \
+ --enablerepo "rhel-9-for-$ARCH_SUFFIX-appstream-rpms" \
--enablerepo "epel" \
--setopt=install_weak_deps=0 \
--best \
@@ -69,8 +70,8 @@ RUN set -eux && \
${INSTALL_PKGS} && \
microdnf -y install \
--disablerepo "*" \
- --enablerepo "ubi-8-baseos-rpms" \
- --enablerepo "ubi-8-appstream-rpms" \
+ --enablerepo "ubi-9-baseos-rpms" \
+ --enablerepo "ubi-9-appstream-rpms" \
--setopt=install_weak_deps=0 \
--best \
--setopt=tsflags=nodocs \
diff --git a/Dockerfiles/web-service/ubuntu/conf/etc/apt/preferences.d/chromium.pref b/Dockerfiles/web-service/ubuntu/conf/etc/apt/preferences.d/chromium.pref
index 4c83dc746..48609ef42 100644
--- a/Dockerfiles/web-service/ubuntu/conf/etc/apt/preferences.d/chromium.pref
+++ b/Dockerfiles/web-service/ubuntu/conf/etc/apt/preferences.d/chromium.pref
@@ -11,4 +11,4 @@ Pin-Priority: 300
# named dependencies:
Package: chromium*
Pin: origin "ftp.debian.org"
-Pin-Priority: 700
\ No newline at end of file
+Pin-Priority: 700
diff --git a/README.md b/README.md
index a275c7379..634641963 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,10 @@

+
+[](https://securityscorecards.dev/viewer/?uri=github.com/zabbix/zabbix-docker)
+
+[](https://sonarcloud.io/summary/new_code?id=zabbix_zabbix-docker)
+
[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build.yml)
[](https://github.com/zabbix/zabbix-docker/actions/workflows/images_build_windows.yml)
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..4b594df62
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,32 @@
+# Disclosure policy
+
+In Zabbix we use the term "**responsible disclosure**", which means we have a policy on how we disclose all security issues that come to our attention, but only after the issues have been resolved and all customers with support contracts have been given time to upgrade or patch their installations.
+
+We kindly ask that when you are reporting a security issue, you follow the same guidelines and share the details only with the Zabbix Security team.
+
+# Before reporting the issue:
+
+Make sure that the issue you are submitting is not related to server configuration, 3rd party scripts and utilities. In order to avoid any possible issues with server configuration we advise Zabbix users to read [Best practices for secure Zabbix setup](https://www.zabbix.com/documentation/current/manual/installation/requirements/best_practices).
+
+# How to report a security issue?
+
+[Create a new issue](https://support.zabbix.com/secure/CreateIssue.jspa) in the Zabbix Security Reports (ZBXSEC) section of the public bug tracker describing the problem (and a proposed solution if possible) in detail. This way we can ensure that only Zabbix security team and the reporter have access to the case.
+
+The following information will be helpful for Zabbix Security team:
+
+- Date and time when you identified the security defect.
+- Affected Zabbix version range.
+- Type of security issue you are reporting, e.g.: XSS, CSRF, SQLi, RCE.
+- Affected components, e.g.: Image, Frontend, Server, Agent, API.
+- Any details you can provide, e.g. screenshots, screen recordings, http(s) transaction logs, POC exploits (please do not share any evidence via unauthenticated file sharing services and avoid sharing sensitive information, as if Zabbix Security team decides that this issue does not fit Security defect description it might be moved to ZBX project and the issue will be visible to all users).
+- Step by step instructions to reproduce the issue as the problem might not be easily identifiable.
+
+# How Zabbix deals with reported security issues:
+
+1. Zabbix Security team reviews the issue and evaluates its potential impact.
+2. If the reported issue is found not to be security-related, then the issue will be moved to the ZBX project.
+3. Zabbix Security team works on the issue to provide a solution and keeps all details of the problem confidential until the next version of Zabbix is out.
+4. New images are created and made available for download on [https://www.zabbix.com/container_images](https://www.zabbix.com/container_images), [https://hub.docker.com/u/zabbix](https://hub.docker.com/u/zabbix) and [Red Hat Certified Container Catalog](https://catalog.redhat.com/software/containers/search?vendor_name=Zabbix%20Sia&p=1)
+5. Zabbix requests [CVE identifiers](https://cve.mitre.org/) for the security issue.
+6. Clients with valid support agreements are emailed giving a period of time when it is possible to upgrade before the issue becomes known to the public.
+7. A public announcement for the community is made.
diff --git a/build.json b/build.json
index 3a1a5ef9a..f277f1a56 100644
--- a/build.json
+++ b/build.json
@@ -8,39 +8,42 @@
"linux/s390x",
"linux/ppc64le"
],
+ "centos": [
+ "linux/amd64",
+ "linux/arm64",
+ "linux/ppc64le"
+ ],
"ol": [
"linux/amd64",
"linux/arm64"
],
+ "rhel": [
+ "X64"
+ ],
"ubuntu": [
"linux/amd64",
"linux/arm/v7",
"linux/arm64",
"linux/s390x"
- ],
- "centos": [
- "linux/amd64",
- "linux/arm64",
- "linux/ppc64le"
]
},
"os-windows": {
- "windows-2022": "ltsc2022",
- "windows-2019": "ltsc2019"
+ "windows-2019": "ltsc2019",
+ "windows-2022": "ltsc2022"
},
"components": {
- "agent": "build-mysql",
- "agent2": "build-mysql",
- "java-gateway": "build-mysql",
- "proxy-mysql": "build-mysql",
- "proxy-sqlite3": "build-sqlite3",
- "server-mysql": "build-mysql",
- "server-pgsql": "build-pgsql",
- "snmptraps": "",
- "web-apache-mysql": "build-mysql",
- "web-apache-pgsql": "build-pgsql",
- "web-nginx-mysql": "build-mysql",
- "web-nginx-pgsql": "build-mysql",
- "web-service": "build-mysql"
+ "agent": { "base": "build-mysql", "rhel": true },
+ "agent2": { "base": "build-mysql", "rhel": true },
+ "java-gateway": { "base": "build-mysql", "rhel": true },
+ "proxy-mysql": { "base": "build-mysql", "rhel": true },
+ "proxy-sqlite3": { "base": "build-sqlite3", "rhel": true },
+ "server-mysql": { "base": "build-mysql", "rhel": true },
+ "server-pgsql": { "base": "build-pgsql", "rhel": false },
+ "snmptraps": { "base": "", "rhel": true },
+ "web-apache-mysql": { "base": "build-mysql", "rhel": false },
+ "web-apache-pgsql": { "base": "build-pgsql", "rhel": false },
+ "web-nginx-mysql": { "base": "build-mysql", "rhel": true },
+ "web-nginx-pgsql": { "base": "build-mysql", "rhel": false },
+ "web-service": { "base": "build-mysql", "rhel": true }
}
-}
\ No newline at end of file
+}
diff --git a/env_vars/mysql_init/init_proxy_db.sql b/env_vars/mysql_init/init_proxy_db.sql
index ca8c6e4c8..0f01f932a 100644
--- a/env_vars/mysql_init/init_proxy_db.sql
+++ b/env_vars/mysql_init/init_proxy_db.sql
@@ -1,2 +1,2 @@
CREATE DATABASE IF NOT EXISTS `zabbix_proxy`;
-GRANT ALL ON `zabbix_proxy`.* TO 'zabbix'@'%';
\ No newline at end of file
+GRANT ALL ON `zabbix_proxy`.* TO 'zabbix'@'%';
diff --git a/kubernetes.yaml b/kubernetes.yaml
index b3cf0b3f7..6a8cb2bb2 100644
--- a/kubernetes.yaml
+++ b/kubernetes.yaml
@@ -603,12 +603,6 @@ spec:
periodSeconds: 5
timeoutSeconds: 3
failureThreshold: 40
- livenessProbe:
- tcpSocket:
- port: 10051
- timeoutSeconds: 3
- failureThreshold: 3
- periodSeconds: 10
securityContext:
capabilities: {}
privileged: false