mirror of https://github.com/offen/docker-volume-backup.git
synced 2025-12-05 17:18:02 +01:00
Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 84afc43fd8 | |
| | 1af345061c | |
| | 5368eb8c5e | |
| | 5978a897ad | |
README.md: 18 lines changed
````diff
@@ -38,11 +38,29 @@ AWS_S3_BUCKET_NAME="<xxx>"
 
 # BACKUP_RETENTION_DAYS="7"
 
+# In case the duration a backup takes fluctuates noticeably in your setup
+# you can adjust this setting to make sure there are no race conditions
+# between the backup finishing and the pruning not deleting backups that
+# sit on the very edge of the time window. Set this value to a duration
+# that is expected to be bigger than the maximum difference of backups.
+# Valid values have a suffix of (s)econds, (m)inutes, (h)ours, or (d)ays.
+
+# BACKUP_PRUNING_LEEWAY="10m"
+
 ########### BACKUP ENCRYPTION
 
 # Backups can be encrypted using gpg in case a passphrase is given
 
 # GPG_PASSPHRASE="<xxx>"
+
+########### MINIO CLIENT CONFIGURATION
+
+# Pass these additional flags to all MinIO client `mc` invocations.
+# This can be used for example to pass `--insecure` when using self
+# signed certificates, or passing `--debug` to gain insights on
+# unexpected behavior.
+
+# MC_GLOBAL_OPTIONS="<xxx>"
 ```
 
 ## Example in a docker-compose setup
````
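For context, the options documented above are plain environment variables read by the container, so they can also be tried out without compose. A minimal `docker run` sketch, assuming the image is published as `offen/docker-volume-backup` and that data volumes are mounted below `/backup` (both assumptions here, not part of this diff):

```sh
# Hypothetical invocation; image name, mount points and values are illustrative.
docker run -d \
  -e AWS_S3_BUCKET_NAME="my-backups" \
  -e AWS_ACCESS_KEY_ID="<xxx>" \
  -e AWS_SECRET_ACCESS_KEY="<xxx>" \
  -e BACKUP_RETENTION_DAYS="7" \
  -e BACKUP_PRUNING_LEEWAY="10m" \
  -e GPG_PASSPHRASE="<xxx>" \
  -e MC_GLOBAL_OPTIONS="--insecure" \
  -v /var/run/docker.sock:/var/run/docker.sock:ro \
  -v my_app_data:/backup/my_app_data:ro \
  offen/docker-volume-backup:latest
```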
```diff
@@ -58,7 +58,7 @@ fi
 if [ ! -z "$AWS_S3_BUCKET_NAME" ]; then
   info "Uploading backup to remote storage"
   echo "Will upload to bucket \"$AWS_S3_BUCKET_NAME\"."
-  mc cp "$BACKUP_FILENAME" "backup-target/$AWS_S3_BUCKET_NAME"
+  mc cp $MC_GLOBAL_OPTIONS "$BACKUP_FILENAME" "backup-target/$AWS_S3_BUCKET_NAME"
   echo "Upload finished."
 fi
 
```
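Note that `$MC_GLOBAL_OPTIONS` is expanded unquoted: the shell word-splits its value, so several global flags can be passed through a single variable, and an empty value simply disappears from the command line. A small sketch of the effect (the flag values are illustrative, not part of the diff):

```sh
# Illustrative only: an unquoted expansion turns one variable into several mc arguments.
MC_GLOBAL_OPTIONS="--insecure --debug"
# The line below effectively runs:
#   mc cp --insecure --debug backup.tar.gz backup-target/my-bucket
mc cp $MC_GLOBAL_OPTIONS "backup.tar.gz" "backup-target/my-bucket"
```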
```diff
@@ -76,14 +76,14 @@ if [ ! -z "$BACKUP_RETENTION_DAYS" ]; then
   sleep "$BACKUP_PRUNING_LEEWAY"
   bucket=$AWS_S3_BUCKET_NAME
 
-  rule_applies_to=$(mc rm --fake --recursive -force --older-than "${BACKUP_RETENTION_DAYS}d" "backup-target/$bucket" | wc -l)
+  rule_applies_to=$(mc rm $MC_GLOBAL_OPTIONS --fake --recursive -force --older-than "${BACKUP_RETENTION_DAYS}d" "backup-target/$bucket" | wc -l)
   if [ "$rule_applies_to" == "0" ]; then
     echo "No backups found older than the configured retention period of $BACKUP_RETENTION_DAYS days."
     echo "Doing nothing."
     exit 0
   fi
 
-  total=$(mc ls "backup-target/$bucket" | wc -l)
+  total=$(mc ls $MC_GLOBAL_OPTIONS "backup-target/$bucket" | wc -l)
 
   if [ "$rule_applies_to" == "$total" ]; then
     echo "Using a retention of ${BACKUP_RETENTION_DAYS} days would prune all currently existing backups, will not continue."
@@ -91,6 +91,6 @@ if [ ! -z "$BACKUP_RETENTION_DAYS" ]; then
     exit 1
   fi
 
-  mc rm --recursive -force --older-than "${BACKUP_RETENTION_DAYS}d" "backup-target/$bucket"
+  mc rm $MC_GLOBAL_OPTIONS --recursive -force --older-than "${BACKUP_RETENTION_DAYS}d" "backup-target/$bucket"
   echo "Successfully pruned ${rule_applies_to} backups older than ${BACKUP_RETENTION_DAYS} days."
 fi
```
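The `sleep "$BACKUP_PRUNING_LEEWAY"` call at the top of this block is what consumes the leeway value documented in the README. A brief sketch, assuming a `sleep` implementation that understands the s/m/h/d suffixes (GNU coreutils does; many busybox builds do as well):

```sh
# Illustrative value; any duration with an s/m/h/d suffix works with such a sleep.
BACKUP_PRUNING_LEEWAY="10m"
# Waiting here pushes the pruning run past backups that finished right at the
# edge of the retention window, so --older-than does not catch them.
sleep "$BACKUP_PRUNING_LEEWAY"
echo "Leeway of $BACKUP_PRUNING_LEEWAY elapsed, starting prune."
```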
```diff
@@ -21,11 +21,13 @@ AWS_S3_BUCKET_NAME="${AWS_S3_BUCKET_NAME:-}"
 AWS_ENDPOINT="${AWS_ENDPOINT:-s3.amazonaws.com}"
 
 GPG_PASSPHRASE="${GPG_PASSPHRASE:-}"
+
+MC_GLOBAL_OPTIONS="${MC_GLOBAL_OPTIONS:-}"
 EOF
 chmod a+x env.sh
 source env.sh
 
-mc alias set backup-target "https://$AWS_ENDPOINT" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY"
+mc $MC_GLOBAL_OPTIONS alias set backup-target "https://$AWS_ENDPOINT" "$AWS_ACCESS_KEY_ID" "$AWS_SECRET_ACCESS_KEY"
 
 # Add our cron entry, and direct stdout & stderr to Docker commands stdout
 echo "Installing cron.d entry with expression $BACKUP_CRON_EXPRESSION."
```