Files
ci-workflows/.github/workflows/docker-build-publish.yml
2026-01-18 11:25:34 +01:00

315 lines
11 KiB
YAML

---
name: Docker Build, Scan & Publish

# Reusable workflow: builds one or more Docker images (grouped by shared
# context+dockerfile), scans them with Trivy, and pushes them to a registry.
on:
  workflow_call:
    inputs:
      images:
        description: >
          JSON array of images to build.
          Each item: { name, context, dockerfile, target, cache_ref }
        required: true
        type: string
      registry_host:
        required: true
        type: string
      build_args:
        description: >
          Multiline build args, one per line: KEY=VALUE (values may include spaces)
        required: false
        type: string
        default: ""
      env:
        description: >
          Multiline env vars, one per line: KEY=VALUE
        required: false
        type: string
        default: ""
      trivy_severity:
        required: false
        type: string
        default: "CRITICAL"
    secrets:
      registry_user:
        required: true
      registry_password:
        required: true
      ci_token:
        required: true
      ssh_private_key:
        required: false
      ssh_known_hosts:
        required: false
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Parse KEY=VALUE lines into job-level env vars.
      # SECURITY: the input is passed via `env:` instead of being interpolated
      # into the script body, so a crafted value cannot inject shell code.
      - name: Load env vars
        if: ${{ inputs.env != '' }}
        env:
          INPUT_ENV: ${{ inputs.env }}
        run: |
          while IFS= read -r line; do
            [ -z "$line" ] && continue
            case "$line" in \#*) continue;; esac
            if [[ "$line" != *=* ]]; then
              echo "Invalid env line: $line" >&2
              exit 1
            fi
            echo "$line" >> "$GITHUB_ENV"
          done <<< "$INPUT_ENV"

      - name: Start ssh-agent
        if: ${{ secrets.ssh_private_key != '' }}
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ secrets.ssh_private_key }}

      # SECURITY: known_hosts content is routed through `env:` (same
      # injection-hardening rationale as above).
      - name: Add SSH known hosts
        if: ${{ secrets.ssh_known_hosts != '' }}
        env:
          SSH_KNOWN_HOSTS: ${{ secrets.ssh_known_hosts }}
        run: |
          mkdir -p ~/.ssh
          printf '%s\n' "$SSH_KNOWN_HOSTS" >> ~/.ssh/known_hosts
          chmod 644 ~/.ssh/known_hosts

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Install Trivy
        run: |
          curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b /usr/local/bin
          trivy --version

      - name: Login to registry
        uses: docker/login-action@v3
        with:
          registry: ${{ inputs.registry_host }}
          username: ${{ secrets.registry_user }}
          password: ${{ secrets.registry_password }}

      # Groups images by (context, dockerfile): multi-image groups are built in
      # one `buildx bake` invocation; singletons use a plain `buildx build`.
      # Every image is pushed, then scanned with Trivy at the sha tag.
      # NOTE(review): scanning happens AFTER push, so a vulnerable image is
      # already published when the scan fails — confirm whether a
      # build/scan/push ordering is wanted instead.
      # NOTE(review): CI_TOKEN is exported but never referenced in this script;
      # confirm whether child processes rely on it or it can be dropped.
      - name: Build, scan and push images
        env:
          IMAGES: ${{ inputs.images }}
          BUILD_ARGS: ${{ inputs.build_args }}
          CI_TOKEN: ${{ secrets.ci_token }}
          TRIVY_SEVERITY: ${{ inputs.trivy_severity }}
          # SECURITY: all expression interpolations are passed via env so that
          # e.g. a tag name or registry value cannot inject shell code.
          REGISTRY_HOST: ${{ inputs.registry_host }}
          RAW_REF: ${{ github.ref }}
          SHA_FULL: ${{ github.sha }}
        run: |
          set -euo pipefail

          # --- Validate the images input ------------------------------------
          if ! echo "$IMAGES" | jq -e . >/dev/null; then
            echo "inputs.images must be valid JSON" >&2
            exit 1
          fi
          if ! echo "$IMAGES" | jq -e 'type == "array"' >/dev/null; then
            echo "inputs.images must be a JSON array" >&2
            exit 1
          fi

          # --- SSH forwarding: only when the agent step actually ran --------
          SSH_FLAGS=()
          SSH_BAKE_JSON="null"
          BAKE_ALLOW_FLAGS=()
          if [ -n "${SSH_AUTH_SOCK:-}" ]; then
            SSH_FLAGS+=(--ssh default)
            SSH_BAKE_JSON='["default"]'
            BAKE_ALLOW_FLAGS+=(--allow=ssh)
          fi

          # --- Tag computation: sha-<12> always; semver ladder on v-tags ----
          SHA_SHORT="${SHA_FULL:0:12}"
          VERSION_TAGS=()
          if [[ "$RAW_REF" =~ ^refs/tags/v([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            VERSION_TAGS+=("v${BASH_REMATCH[1]}.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}")
            VERSION_TAGS+=("v${BASH_REMATCH[1]}.${BASH_REMATCH[2]}")
            VERSION_TAGS+=("v${BASH_REMATCH[1]}")
            VERSION_TAGS+=("latest")
          fi

          # --- Build args: CLI flags for `build`, JSON object for `bake` ----
          BUILD_ARG_FLAGS=()
          BUILD_ARGS_JSON="{}"
          if [ -n "$BUILD_ARGS" ]; then
            while IFS= read -r line; do
              # Trim surrounding whitespace; skip blanks and '#' comments.
              trimmed="${line#"${line%%[![:space:]]*}"}"
              trimmed="${trimmed%"${trimmed##*[![:space:]]}"}"
              [ -z "$trimmed" ] && continue
              case "$trimmed" in \#*) continue;; esac
              if [[ "$trimmed" != *=* ]]; then
                echo "Invalid build arg: $trimmed" >&2
                exit 1
              fi
              BUILD_ARG_FLAGS+=(--build-arg "$trimmed")
              key="${trimmed%%=*}"
              val="${trimmed#*=}"
              BUILD_ARGS_JSON=$(jq --arg k "$key" --arg v "$val" '. + {($k): $v}' <<<"$BUILD_ARGS_JSON")
            done <<< "$BUILD_ARGS"
          fi

          # Strip leading "./" segments and a trailing "/" (except bare "/"),
          # so context/dockerfile prefix comparisons are reliable.
          normalize_path() {
            local p="$1"
            while [[ "$p" == ./* ]]; do
              p="${p#./}"
            done
            if [ "$p" != "/" ]; then
              p="${p%/}"
            fi
            printf '%s' "$p"
          }

          # Outer loop reads groups on fd 3 (inner loops on fd 4): docker and
          # trivy may read stdin, which would otherwise silently consume the
          # remaining loop input.
          while IFS= read -r -u 3 group; do
            GROUP_COUNT=$(echo "$group" | jq 'length')
            CONTEXT=$(echo "$group" | jq -r '.[0].context')
            DOCKERFILE=$(echo "$group" | jq -r '.[0].dockerfile')
            context_norm="$(normalize_path "$CONTEXT")"
            dockerfile_norm="$(normalize_path "$DOCKERFILE")"
            # bake resolves the dockerfile relative to the context, while
            # `buildx build --file` resolves it relative to the cwd — compute
            # both forms.
            DOCKERFILE_FOR_BAKE="$DOCKERFILE"
            DOCKERFILE_FOR_BUILD="$DOCKERFILE"
            if [ -n "$context_norm" ] && [ "$context_norm" != "." ]; then
              if [[ "$dockerfile_norm" == "$context_norm/"* ]]; then
                # Quoted expansion: glob chars in the context must be literal.
                DOCKERFILE_FOR_BAKE="${dockerfile_norm#"$context_norm"/}"
                DOCKERFILE_FOR_BUILD="$dockerfile_norm"
              elif [[ "$dockerfile_norm" != /* ]]; then
                DOCKERFILE_FOR_BUILD="${context_norm}/${dockerfile_norm}"
              fi
            fi
            if [ "$GROUP_COUNT" -gt 1 ]; then
              echo "==== Building $GROUP_COUNT images from $DOCKERFILE (bake) ===="
              # One shared cache ref per group; warn when entries disagree.
              CACHE_REF_SUFFIXES=$(echo "$group" | jq -c 'map(.cache_ref // empty) | map(select(length > 0)) | unique')
              CACHE_REF_SUFFIX=$(echo "$CACHE_REF_SUFFIXES" | jq -r '.[0] // empty')
              UNIQUE_CACHE_COUNT=$(echo "$CACHE_REF_SUFFIXES" | jq -r 'length')
              if [ "$UNIQUE_CACHE_COUNT" -gt 1 ]; then
                echo "Warning: multiple cache_ref values for the same dockerfile. Using $CACHE_REF_SUFFIX." >&2
              fi
              CACHE_REF=""
              if [ -n "$CACHE_REF_SUFFIX" ]; then
                CACHE_REF="$REGISTRY_HOST/$CACHE_REF_SUFFIX"
              fi
              # Assemble a JSON bake definition with one target per image.
              TARGETS_JSON="{}"
              TARGET_NAMES=()
              while IFS= read -r -u 4 entry; do
                IDX=$(echo "$entry" | jq -r '.key')
                IMG=$(echo "$entry" | jq -c '.value')
                IMAGE_NAME=$(echo "$IMG" | jq -r '.name')
                TARGET=$(echo "$IMG" | jq -r '.target // empty')
                FULL_IMAGE="$REGISTRY_HOST/${IMAGE_NAME}"
                TAGS=()
                TAGS+=("$FULL_IMAGE:sha-$SHA_SHORT")
                for ver in "${VERSION_TAGS[@]}"; do
                  TAGS+=("$FULL_IMAGE:$ver")
                done
                TAGS_JSON=$(printf '%s\n' "${TAGS[@]}" | jq -R . | jq -s .)
                TARGET_NAME="img_${IDX}"
                TARGET_NAMES+=("$TARGET_NAME")
                TARGET_OBJ=$(jq -n \
                  --arg context "$CONTEXT" \
                  --arg dockerfile "$DOCKERFILE_FOR_BAKE" \
                  --arg target "$TARGET" \
                  --argjson tags "$TAGS_JSON" \
                  --argjson args "$BUILD_ARGS_JSON" \
                  --argjson ssh "$SSH_BAKE_JSON" \
                  --arg cache_ref "$CACHE_REF" \
                  '{
                    context: $context,
                    dockerfile: $dockerfile,
                    tags: $tags,
                    args: $args
                  }
                  + (if ($ssh != null) then {ssh: $ssh} else {} end)
                  + (if ($target != "" and $target != "null") then {target: $target} else {} end)
                  + (if ($cache_ref != "") then {"cache-from": ["type=registry,ref=" + $cache_ref], "cache-to": ["type=registry,ref=" + $cache_ref + ",mode=max"]} else {} end)')
                TARGETS_JSON=$(jq -n --arg name "$TARGET_NAME" --argjson target "$TARGET_OBJ" --argjson targets "$TARGETS_JSON" '$targets + {($name): $target}')
              done 4< <(echo "$group" | jq -c 'to_entries[]')
              GROUP_TARGETS_JSON=$(printf '%s\n' "${TARGET_NAMES[@]}" | jq -R . | jq -s .)
              BAKE_JSON=$(jq -n --argjson targets "$TARGETS_JSON" --argjson group_targets "$GROUP_TARGETS_JSON" '{target:$targets, group:{default:{targets:$group_targets}}}')
              BAKE_FILE=$(mktemp)
              echo "$BAKE_JSON" > "$BAKE_FILE"
              docker buildx bake --file "$BAKE_FILE" --push "${BAKE_ALLOW_FLAGS[@]}"
              rm -f "$BAKE_FILE"
              # Scan each image of the group at its immutable sha tag.
              while IFS= read -r -u 4 img; do
                IMAGE_NAME=$(echo "$img" | jq -r '.name')
                FULL_IMAGE="$REGISTRY_HOST/${IMAGE_NAME}"
                echo "==== Trivy scan for $FULL_IMAGE ===="
                trivy image \
                  --severity "$TRIVY_SEVERITY" \
                  --exit-code 1 \
                  "$FULL_IMAGE:sha-$SHA_SHORT"
              done 4< <(echo "$group" | jq -c '.[]')
            else
              # Single image: plain buildx build + scan.
              img=$(echo "$group" | jq -c '.[0]')
              IMAGE_NAME=$(echo "$img" | jq -r '.name')
              FULL_IMAGE="$REGISTRY_HOST/${IMAGE_NAME}"
              CACHE_REF_SUFFIX=$(echo "$img" | jq -r '.cache_ref // empty')
              CONTEXT=$(echo "$img" | jq -r '.context')
              DOCKERFILE=$(echo "$img" | jq -r '.dockerfile')
              TARGET=$(echo "$img" | jq -r '.target // empty')
              TAGS=()
              TAGS+=("$FULL_IMAGE:sha-$SHA_SHORT")
              for ver in "${VERSION_TAGS[@]}"; do
                TAGS+=("$FULL_IMAGE:$ver")
              done
              TAG_ARGS=()
              for tag in "${TAGS[@]}"; do
                TAG_ARGS+=(--tag "$tag")
              done
              TARGET_FLAGS=()
              if [ -n "$TARGET" ] && [ "$TARGET" != "null" ]; then
                TARGET_FLAGS+=(--target "$TARGET")
              fi
              CACHE_FLAGS=()
              if [ -n "$CACHE_REF_SUFFIX" ]; then
                CACHE_REF="$REGISTRY_HOST/${CACHE_REF_SUFFIX}"
                CACHE_FLAGS+=(--cache-from "type=registry,ref=$CACHE_REF")
                CACHE_FLAGS+=(--cache-to "type=registry,ref=$CACHE_REF,mode=max")
              fi
              echo "==== Building $FULL_IMAGE ===="
              docker buildx build \
                --file "$DOCKERFILE_FOR_BUILD" \
                "${TARGET_FLAGS[@]}" \
                "${CACHE_FLAGS[@]}" \
                "${SSH_FLAGS[@]}" \
                --push \
                "${TAG_ARGS[@]}" \
                "${BUILD_ARG_FLAGS[@]}" \
                "$CONTEXT"
              echo "==== Trivy scan for $FULL_IMAGE ===="
              trivy image \
                --severity "$TRIVY_SEVERITY" \
                --exit-code 1 \
                "${TAGS[0]}"
            fi
          done 3< <(echo "$IMAGES" | jq -c 'sort_by(.context, .dockerfile) | group_by([.context, .dockerfile])[]')