diff --git a/.env.sample b/.env.sample
index d8d915e..8a0ef77 100644
--- a/.env.sample
+++ b/.env.sample
@@ -4,11 +4,9 @@
 SECRET_RESTIC_PASSWORD_VERSION=v1
 
 COMPOSE_FILE=compose.yml
-SERVER_NAME=example.com
-RESTIC_HOST=minio.example.com
+RESTIC_REPO=/backups/restic
 
 CRON_SCHEDULE='*/5 * * * *'
-REMOVE_BACKUP_VOLUME_AFTER_UPLOAD=1
 
 # swarm-cronjob, instead of built-in cron
 #COMPOSE_FILE="$COMPOSE_FILE:compose.swarm-cronjob.yml"
diff --git a/abra.sh b/abra.sh
index 9c54329..b6a1153 100644
--- a/abra.sh
+++ b/abra.sh
@@ -1,2 +1,2 @@
 export ENTRYPOINT_VERSION=v1
-export BACKUP_VERSION=v1
+export BACKUPBOT_VERSION=v1
diff --git a/backupbot.py b/backupbot.py
new file mode 100755
index 0000000..0fc12be
--- /dev/null
+++ b/backupbot.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python3
+
+import os
+import click
+import json
+import subprocess
+# todo json logging
+import logging
+import docker
+import restic
+from restic.errors import ResticFailedError
+from pathlib import Path
+logging.basicConfig(level=logging.INFO)
+
+VOLUME_PATH = "/var/lib/docker/volumes/"
+SERVICE = None
+
+@click.group()
+@click.option('-l', '--log', 'loglevel')
+@click.option('service', '--host', '-h', envvar='SERVICE')
+def cli(loglevel, service):
+    global SERVICE
+    if service:
+        SERVICE = service.replace('.','_')
+    if loglevel:
+        numeric_level = getattr(logging, loglevel.upper(), None)
+        if not isinstance(numeric_level, int):
+            raise ValueError('Invalid log level: %s' % loglevel)
+        logging.basicConfig(level=numeric_level)
+    init_repo()
+
+
+def init_repo():
+    export_secrets()
+    restic.repository = os.environ['RESTIC_REPO']
+    restic.password_file = '/var/run/secrets/restic_password'
+    try:
+        restic.cat.config()
+    except ResticFailedError as error:
+        if 'unable to open config file' in str(error):
+            result = restic.init()
+            logging.info(f"Initialized restic repo: {result}")
+        else:
+            raise error
+
+def export_secrets():
+    for env in os.environ:
+        if env.endswith('PASSWORD_FILE') or env.endswith('KEY_FILE'):
+            logging.debug(f"exported secret: {env}")
+            with open(os.environ[env]) as file:
+                os.environ[env.removesuffix('_FILE')] = file.read()
+
+@cli.command()
+def create():
+    pre_commands, post_commands, backup_paths, apps = get_backup_cmds()
+    run_commands(pre_commands)
+    backup_volumes(backup_paths, apps)
+    run_commands(post_commands)
+
+def get_backup_cmds():
+    client = docker.from_env()
+    containers = dict(map(lambda c: (
+        c.labels['com.docker.swarm.service.name'], c), client.containers.list()))
+    backup_paths = set()
+    backup_apps = set()
+    pre_commands = {}
+    post_commands = {}
+    services = client.services.list()
+    for s in services:
+        labels = s.attrs['Spec']['Labels']
+        if (backup := labels.get('backupbot.backup')) and bool(backup):
+            stack_name = labels['com.docker.stack.namespace']
+            if SERVICE and SERVICE != stack_name:
+                continue
+            backup_apps.add(stack_name)
+            container = containers[s.name]
+            if prehook:= labels.get('backupbot.backup.pre-hook'):
+                pre_commands[container] = prehook
+            if posthook:= labels.get('backupbot.backup.post-hook'):
+                post_commands[container] = posthook
+            backup_paths = backup_paths.union(
+                Path(VOLUME_PATH).glob(f"{stack_name}_*"))
+    return pre_commands, post_commands, list(backup_paths), list(backup_apps)
+
+def run_commands(commands):
+    for container, command in commands.items():
+        if not command:
+            continue
+        # Use bash's pipefail to return exit codes inside a pipe to prevent silent failure
+        command = command.removeprefix('bash -c \'').removeprefix('sh -c \'')
+        command = command.removesuffix('\'')
+        command = f"bash -c 'set -o pipefail;{command}'"
+        result = container.exec_run(command)
+        logging.info(f"run command in {container.name}")
+        logging.info(command)
+        if result.exit_code:
+            logging.error(result.output.decode())
+        else:
+            logging.info(result.output.decode())
+
+def backup_volumes(backup_paths, apps, dry_run=False):
+    result = restic.backup(backup_paths, dry_run=dry_run, tags=apps)
+    logging.info(result)
+
+@cli.command()
+@click.option('snapshot', '--snapshot', '-s', envvar='SNAPSHOT', required=True)
+def restore(snapshot):
+    service_paths = f'/var/lib/docker/volumes/{SERVICE}_*'
+    result = restic.restore(snapshot_id=snapshot, include=service_paths, target_dir='/')
+
+
+@cli.command()
+def snapshots():
+    snapshots = restic.snapshots()
+    for snap in snapshots:
+        if not SERVICE or (tags:= snap.get('tags')) and SERVICE in tags:
+            print(snap['time'], snap['id'])
+
+@cli.command()
+@click.option('snapshot', '--snapshot', '-s', envvar='SNAPSHOT', required=True)
+@click.option('path', '--path', '-p', envvar='INCLUDE_PATH')
+def ls(snapshot, path):
+    results = list_files(snapshot, path)
+    for r in results:
+        if r.get('path'):
+            print(f"{r['ctime']}\t{r['path']}")
+
+def list_files(snapshot, path):
+    cmd = restic.cat.base_command() + ['ls', snapshot]
+    if path:
+        cmd.append(path)
+    output = restic.internal.command_executor.execute(cmd)
+    output = output.replace('}\n{', '}|{')
+    results = list(map(json.loads, output.split('|')))
+    return results
+
+@cli.command()
+@click.option('snapshot', '--snapshot', '-s', envvar='SNAPSHOT', required=True)
+@click.option('path', '--path', '-p', envvar='INCLUDE_PATH')
+def download(snapshot, path):
+    files = list_files(snapshot, path)
+    filetype = [f.get('type') for f in files if f.get('path') == path][0]
+    cmd = restic.cat.base_command() + ['dump', snapshot, path]
+    output = subprocess.run(cmd, capture_output=True).stdout
+    filename = "/tmp/" + Path(path).name
+    if filetype == 'dir':
+        filename = filename + ".tar"
+    with open(filename, "wb") as file:
+        file.write(output)
+    print(filename)
+
+
+if __name__ == '__main__':
+    cli()
diff --git a/compose.yml b/compose.yml
index 0b13631..56c909a 100644
--- a/compose.yml
+++ b/compose.yml
@@ -6,14 +6,11 @@ services:
     volumes:
       - "/var/run/docker.sock:/var/run/docker.sock"
      - "/var/lib/docker/volumes/:/var/lib/docker/volumes/:ro"
+      - backups:/backups
     environment:
       - CRON_SCHEDULE
       - RESTIC_REPO
       - RESTIC_PASSWORD_FILE=/run/secrets/restic_password
-      - BACKUP_DEST=/backups
-      - RESTIC_HOST
-      - SERVER_NAME
-      - REMOVE_BACKUP_VOLUME_AFTER_UPLOAD=1
     secrets:
       - restic_password
     deploy:
@@ -23,8 +20,8 @@
       - source: entrypoint
         target: /entrypoint.sh
         mode: 0555
-      - source: backup
-        target: /backup.sh
+      - source: backupbot
+        target: /backup
         mode: 0555
     entrypoint: ['/entrypoint.sh']
 
@@ -32,11 +29,14 @@ secrets:
   restic_password:
     external: true
     name: ${STACK_NAME}_restic_password_${SECRET_RESTIC_PASSWORD_VERSION}
+
+volumes:
+  backups:
 
 configs:
   entrypoint:
     name: ${STACK_NAME}_entrypoint_${ENTRYPOINT_VERSION}
     file: entrypoint.sh
-  backup:
-    name: ${STACK_NAME}_backup_${BACKUP_VERSION}
-    file: backup.sh
+  backupbot:
+    name: ${STACK_NAME}_backupbot_${BACKUPBOT_VERSION}
+    file: backupbot.py
diff --git a/entrypoint.sh b/entrypoint.sh
index 6de4aa8..94ba3c3 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -1,12 +1,15 @@
 #!/bin/sh
 
-set -e
+set -eu
 
-apk add --upgrade --no-cache bash curl jq restic
+apk add --upgrade --no-cache bash curl jq restic python3 py3-pip
+
+# Todo use requirements file
+pip install click docker resticpy
 
 cron_schedule="${CRON_SCHEDULE:?CRON_SCHEDULE not set}"
 
-echo "$cron_schedule /backup.sh" | crontab -
+#echo "$cron_schedule /backupbot.py" | crontab -
 crontab -l
 
 crond -f -d8 -L /dev/stdout
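
Note on how apps opt in: get_backup_cmds() only considers stacks whose services carry backupbot.backup in their swarm service labels (Spec.Labels, i.e. deploy.labels in a stack's compose file), plus optional backupbot.backup.pre-hook / backupbot.backup.post-hook commands that run_commands() execs in the matching container before and after the restic run. A minimal sketch of what an app recipe might declare — the service name, image and dump path are invented for illustration, only the backupbot.* keys come from this diff:

  db:
    image: postgres:14
    deploy:
      labels:
        backupbot.backup: "true"
        backupbot.backup.pre-hook: "sh -c 'pg_dumpall -U postgres > /var/lib/postgresql/data/dump.sql'"
        backupbot.backup.post-hook: "rm -f /var/lib/postgresql/data/dump.sql"

Every directory under /var/lib/docker/volumes/ whose name starts with the stack name is then backed up, and the resulting snapshot is tagged with that stack name.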
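Since compose.yml now ships backupbot.py as the /backup config (mode 0555) and entrypoint.sh installs python3, click, docker and resticpy, the commands can be invoked inside the running container. A rough sketch of the intended calls — the app domain and snapshot ID below are placeholders:

  /backup create                                      # back up all labelled stacks
  /backup --host app.example.com create               # one app only; dots become underscores to match the stack name
  /backup --host app.example.com snapshots            # list snapshots tagged with that stack
  /backup --host app.example.com restore -s 4f3c618f
  /backup ls -s 4f3c618f -p /var/lib/docker/volumes/
  /backup download -s 4f3c618f -p /var/lib/docker/volumes/app_example_com_db/_data/dump.sql

The same selections can also come from the SERVICE, SNAPSHOT and INCLUDE_PATH environment variables via the envvar bindings on the click options. The cron line in entrypoint.sh is still commented out, so scheduling the create run is left for a follow-up.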