Add support for remote path

Frederik Ring
2022-12-24 10:30:12 +01:00
parent c0eff2e14f
commit 1fa0548756
5 changed files with 12 additions and 7 deletions
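The new AzureStoragePath option uses the same `split_words` struct tag as the existing Azure settings, which suggests an envconfig-style decoder maps it to the AZURE_STORAGE_PATH environment variable. A minimal sketch of that mapping, assuming github.com/kelseyhightower/envconfig and using illustrative values only:

package main

import (
	"fmt"
	"os"

	"github.com/kelseyhightower/envconfig"
)

// Config mirrors the subset of fields touched by this commit.
// Assumption: configuration is decoded with kelseyhightower/envconfig,
// which is what the split_words tags suggest.
type Config struct {
	AzureStorageContainerName string `split_words:"true"`
	AzureStoragePath          string `split_words:"true"`
}

func main() {
	// Illustrative values only.
	os.Setenv("AZURE_STORAGE_CONTAINER_NAME", "test-container")
	os.Setenv("AZURE_STORAGE_PATH", "path/to/backup")

	var c Config
	if err := envconfig.Process("", &c); err != nil {
		panic(err)
	}
	fmt.Println(c.AzureStoragePath) // path/to/backup
}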

View File

@@ -66,6 +66,7 @@ type Config struct {
 	AzureStorageAccountName       string `split_words:"true"`
 	AzureStoragePrimaryAccountKey string `split_words:"true"`
 	AzureStorageContainerName     string `split_words:"true"`
+	AzureStoragePath              string `split_words:"true"`
 	AzureStorageEndpoint          string `split_words:"true" default:"https://{{ .AccountName }}.blob.core.windows.net/"`
 }

View File

@@ -197,6 +197,7 @@ func newScript() (*script, error) {
			AccountName:       s.c.AzureStorageAccountName,
			PrimaryAccountKey: s.c.AzureStoragePrimaryAccountKey,
			Endpoint:          s.c.AzureStorageEndpoint,
+			RemotePath:        s.c.AzureStoragePath,
		}
		azureBackend, err := azure.NewStorageBackend(azureConfig, logFunc)
		if err != nil {

View File

@@ -8,7 +8,7 @@ import (
 	"context"
 	"fmt"
 	"os"
-	"path"
+	"path/filepath"
 	"sync"
 	"text/template"
 	"time"
@@ -31,6 +31,7 @@ type Config struct {
 	ContainerName     string
 	PrimaryAccountKey string
 	Endpoint          string
+	RemotePath        string
 }

 // NewStorageBackend creates and initializes a new Azure Blob Storage backend.
@@ -57,6 +58,7 @@ func NewStorageBackend(opts Config, logFunc storage.Log) (storage.Backend, error
 		client:        client,
 		containerName: opts.ContainerName,
 		StorageBackend: &storage.StorageBackend{
+			DestinationPath: opts.RemotePath,
 			Log:             logFunc,
 		},
 	}
@@ -74,11 +76,10 @@ func (b *azureBlobStorage) Copy(file string) error {
 	if err != nil {
 		return fmt.Errorf("(*azureBlobStorage).Copy: error opening file %s: %w", file, err)
 	}
-
 	_, err = b.client.UploadStream(
 		context.Background(),
 		b.containerName,
-		path.Base(file),
+		filepath.Join(b.DestinationPath, filepath.Base(file)),
 		fileReader,
 		nil,
 	)
@@ -91,8 +92,9 @@ func (b *azureBlobStorage) Copy(file string) error {
 // Prune rotates away backups according to the configuration and provided
 // deadline for the Azure Blob storage backend.
 func (b *azureBlobStorage) Prune(deadline time.Time, pruningPrefix string) (*storage.PruneStats, error) {
+	lookupPrefix := filepath.Join(b.DestinationPath, pruningPrefix)
 	pager := b.client.NewListBlobsFlatPager(b.containerName, &container.ListBlobsFlatOptions{
-		Prefix: &pruningPrefix,
+		Prefix: &lookupPrefix,
 	})
 	var matches []string
 	var totalCount uint
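For illustration only (not part of this commit): a minimal, standalone sketch of how the new RemotePath value ends up prefixing both the blob name uploaded by Copy and the lookup prefix used by Prune. The local file name and pruning prefix below are made-up values; note that filepath.Join uses the host OS separator, so the forward-slash output shown assumes a Unix-like system.

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	destinationPath := "path/to/backup"     // from AZURE_STORAGE_PATH
	localFile := "/tmp/backups/test.tar.gz" // illustrative local archive
	pruningPrefix := "test"                 // illustrative pruning prefix

	// Blob name used by Copy: remote path joined with the archive's base name.
	blobName := filepath.Join(destinationPath, filepath.Base(localFile))
	fmt.Println(blobName) // path/to/backup/test.tar.gz

	// Prefix used by Prune when listing candidate blobs.
	lookupPrefix := filepath.Join(destinationPath, pruningPrefix)
	fmt.Println(lookupPrefix) // path/to/backup/test
}

This matches the integration test below, which downloads the backup as path/to/backup/test.tar.gz after setting AZURE_STORAGE_PATH to 'path/to/backup'.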

View File

@@ -35,6 +35,7 @@ services:
       AZURE_STORAGE_PRIMARY_ACCOUNT_KEY: Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==
       AZURE_STORAGE_CONTAINER_NAME: test-container
       AZURE_STORAGE_ENDPOINT: http://storage:10000/{{ .AccountName }}/
+      AZURE_STORAGE_PATH: 'path/to/backup'
       BACKUP_FILENAME: test.tar.gz
       BACKUP_CRON_EXPRESSION: 0 0 5 31 2 ?
       BACKUP_RETENTION_DAYS: ${BACKUP_RETENTION_DAYS:-7}

View File

@@ -18,7 +18,7 @@ sleep 5
 expect_running_containers "3"

 docker-compose run --rm az_cli \
-  az storage blob download -f /dump/test.tar.gz -c test-container -n test.tar.gz
+  az storage blob download -f /dump/test.tar.gz -c test-container -n path/to/backup/test.tar.gz
 tar -xvf ./local/test.tar.gz -C /tmp && test -f /tmp/backup/app_data/offen.db
 pass "Found relevant files in untared remote backups."
@@ -32,7 +32,7 @@ sleep 5
 docker-compose exec backup backup

 docker-compose run --rm az_cli \
-  az storage blob download -f /dump/test.tar.gz -c test-container -n test.tar.gz
+  az storage blob download -f /dump/test.tar.gz -c test-container -n path/to/backup/test.tar.gz
 test -f ./local/test.tar.gz
 pass "Remote backups have not been deleted."