# GitHub Actions workflow: build and push Docker images.
name: Build and push Docker images
on:
  # Build images daily (at 00:00 UTC). The cron expression is quoted so the
  # asterisks are never mistaken for YAML alias syntax by stricter parsers.
  schedule:
    - cron: "0 0 * * *"
  push:
    branches:
      - master
    paths:
      - .dockerignore
      - .github/scripts/**
      - .github/workflows/build-and-push-image.yaml
      - .github/workflows/build-docker-images.yaml
      - .github/workflows/build-custom-docker-images.yaml
      - docker/**
  pull_request:
    branches:
      - master
    paths:
      - .dockerignore
      - .github/scripts/**
      - .github/workflows/build-and-push-image.yaml
      - .github/workflows/build-docker-images.yaml
      - .github/workflows/build-custom-docker-images.yaml
      - docker/**
  workflow_dispatch:
    inputs:
      force_rebuild:
        type: boolean
        description: Force rebuild (even if images for the latest commit already exist)
        default: false
jobs:
  # Determines which commit to build and whether images for it already exist;
  # downstream jobs consume its outputs to decide whether to run at all.
  setup:
    name: Setup
    runs-on: ubuntu-24.04
    permissions:
      contents: read
      packages: read
    outputs:
      commit_hash: ${{ steps.prepare-default-build.outputs.commit_hash }}
      images_already_exist: ${{ steps.prepare-default-build.outputs.images_already_exist }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      # If the original MariaDB entrypoint script changes, we can't guarantee
      # that our custom version (which relies on functions defined in the
      # original) will still work, so we fail the workflow run at this point.
      - name: Check if MariaDB entrypoint script has been updated
        id: mariadb-entrypoint-check
        env:
          # Pinned known-good revision vs. current master; the script compares
          # the two to detect upstream changes.
          MARIADB_ENTRYPOINT_KNOWN_URL: https://raw.githubusercontent.com/MariaDB/mariadb-docker/66972bc2e0c6fd33dcd5d98de0653c5696a1166e/11.8/docker-entrypoint.sh
          MARIADB_ENTRYPOINT_LATEST_URL: https://raw.githubusercontent.com/MariaDB/mariadb-docker/master/11.8/docker-entrypoint.sh
        run: ./.github/scripts/check-mariadb-entrypoint.sh
      # We check if the workflow was triggered by a schedule and if today is
      # Monday, in which case we want to trigger a (re)build in any case (to
      # keep the Docker images up-to-date).
      #
      # We also check if the "force rebuild" input is checked when the workflow
      # is triggered manually; if yes, we also want to (re)build.
      - name: Prepare default build
        id: prepare-default-build
        env:
          GH_TOKEN: ${{ github.token }}
          GITHUB_EVENT_NAME: ${{ github.event_name }}
          PACKAGE_OWNER: ${{ github.repository_owner }}
          PACKAGE_NAME: vmangos-database
          VMANGOS_REPOSITORY_OWNER: vmangos
          VMANGOS_REPOSITORY_NAME: core
          VMANGOS_REVISION: development
          FORCE_REBUILD: ${{ inputs.force_rebuild }}
        run: ./.github/scripts/prepare-default-build.sh
build-and-push-server-images:
name: Build and push server images
needs: setup
if: ${{ needs.setup.outputs.images_already_exist != 'true' }}
strategy:
fail-fast: true
matrix:
client_version: [5875, 5464, 5302, 5086, 4878, 4695, 4544, 4449]
permissions:
contents: read
packages: write
uses: ./.github/workflows/build-and-push-image.yaml
with:
workflow_mode: default
image_kind: server
architectures: both
push_images: ${{ github.event_name != 'pull_request' }}
commit_hash: ${{ needs.setup.outputs.commit_hash }}
client_version: ${{ matrix.client_version }}
# We build database images only after the server build succeeds; otherwise, a
# failure there could leave the latest database images out of sync.
build-and-push-database-images:
name: Build and push database images
needs: [setup, build-and-push-server-images]
if: ${{ needs.setup.outputs.images_already_exist != 'true' }}
permissions:
contents: read
packages: write
uses: ./.github/workflows/build-and-push-image.yaml
with:
workflow_mode: default
image_kind: database
architectures: both
push_images: ${{ github.event_name != 'pull_request' }}
commit_hash: ${{ needs.setup.outputs.commit_hash }}
delete-old-package-versions:
name: Delete old package versions
needs: [setup, build-and-push-server-images, build-and-push-database-images]
if: ${{ needs.setup.outputs.images_already_exist != 'true' && github.event_name != 'pull_request' }}
runs-on: ubuntu-24.04
permissions:
contents: read
packages: write
steps:
- name: Delete old server package versions
uses: actions/delete-package-versions@v5
with:
package-name: vmangos-server
package-type: container
# According to
# https://docs.github.com/en/enterprise-server@3.11/packages/working-with-a-github-packages-registry/working-with-the-npm-registry#limits-for-published-npm-versions
# there might at some point be a limit of 1,000 versions per package.
# It is not clear if that will only be applied to Enterprise and/or
# npm packages, but let's be safe and make sure we don't keep more
# than 1,000.
#
# We have 8 different server versions per build. For each server
# version, we have the image index plus two image manifests, making a
# total of 24 packages.
#
# This means, to stay below the assumed limit of 1,000 packages, we
# can keep a maximum of 41 builds worth of server images; let's make
# that 40 to have a nice and round number:
# 24 * 40 = 960 packages
min-versions-to-keep: 960
- name: Delete old database package versions
uses: actions/delete-package-versions@v5
with:
package-name: vmangos-database
package-type: container
# Above, we have set it up to keep 40 builds worth of server images
# (to stay below that supposed 1,000 packages limit). We want to keep
# all the matching database images.
#
# Per database build we have 3 packages in total (the image index
# plus two image manifests).
#
# 3 * 40 = 120 packages
min-versions-to-keep: 120
update-badges:
name: Update badges
needs: [setup, build-and-push-server-images, build-and-push-database-images, delete-old-package-versions]
if: ${{ needs.setup.outputs.images_already_exist != 'true' && github.event_name != 'pull_request' }}
runs-on: ubuntu-24.04
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Upload badge JSON files to web server
env:
COMMIT_HASH: ${{ needs.setup.outputs.commit_hash }}
BADGES_FTP_HOST: ${{ secrets.BADGES_FTP_HOST }}
BADGES_FTP_USERNAME: ${{ secrets.BADGES_FTP_USERNAME }}
BADGES_FTP_PASSWORD: ${{ secrets.BADGES_FTP_PASSWORD }}
run: ./.github/scripts/upload-badges.sh