#!/usr/bin/python3

import os
import click
import json
import subprocess
import logging
import docker
import restic
from restic.errors import ResticFailedError
from pathlib import Path
#logging.basicConfig(level=logging.INFO)

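# Host path where docker stores named volumes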
VOLUME_PATH = "/var/lib/docker/volumes/"
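# Optional app filter, set from the --host/-h option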
SERVICE = None

@click.group()
@click.option('-l', '--log', 'loglevel')
@click.option('service', '--host', '-h', envvar='SERVICE')
@click.option('repository', '--repo', '-r', envvar='RESTIC_REPO', required=True)
def cli(loglevel, service, repository):
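    """Backup and restore docker volumes and secrets with restic."""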
    global SERVICE
    if service:
        SERVICE = service.replace('.','_')
    if loglevel:
        numeric_level = getattr(logging, loglevel.upper(), None)
        if not isinstance(numeric_level, int):
            raise ValueError(f'Invalid log level: {loglevel}')
        logging.basicConfig(level=numeric_level)
    export_secrets()
    init_repo(repository)


def init_repo(repository):
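    """Point restic at the repository and initialize it if no config exists yet."""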
    restic.repository = repository
    restic.password_file = '/var/run/secrets/restic_password'
    try:
        restic.cat.config()
    except ResticFailedError as error:
        if 'unable to open config file' in str(error):
            result = restic.init()
            logging.info(f"Initialized restic repo: {result}")
        else:
            raise

def export_secrets():
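    """Read every *_FILE secret referenced in the environment and export its
    content under the variable name without the _FILE suffix."""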
    for env in os.environ:
        if env.endswith('PASSWORD_FILE') or env.endswith('KEY_FILE'):
            logging.debug(f"exported secret: {env}")
            with open(os.environ[env]) as file:
                os.environ[env.removesuffix('_FILE')] = file.read()

@cli.command(help='Attach all secrets to the backupbot container; this can result in a container restart')
def attach_secrets():
    client = docker.from_env()
    services = client.services.list()
    apps = []
    secrets = []
    secret_ids = []
    # Get all apps that activate backups
    for s in services:
        labels = s.attrs['Spec']['Labels']
        # Label values are strings ("true"/"false"), so bool() alone is not enough
        if labels.get('backupbot.backup', '').lower() in ('true', '1', 'yes'):
            apps.append(labels['com.docker.stack.namespace'])
    # Get all Secrets for these Apps
    for s in services:
        labels = s.attrs['Spec']['Labels']
        if labels.get('com.docker.stack.namespace') in apps:
            if app_secs := s.attrs['Spec']['TaskTemplate']['ContainerSpec'].get('Secrets'):
                for sec in app_secs:
                    if sec['SecretID'] not in secret_ids:
                        # Mount each secret under its secret name to avoid target-name conflicts
                        secret_ids.append(sec['SecretID'])
                        sec['File']['Name'] = sec['SecretName']
                        secrets.append(sec)
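    # The backupbot itself runs as the "app" service of its own stack (STACK_NAME)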
    backupbot_service = client.services.get(os.environ['STACK_NAME']+"_app")
    # Append the backupbot secrets
    backupbot_secrets = backupbot_service.attrs['Spec']['TaskTemplate']['ContainerSpec']['Secrets']
    for sec in backupbot_secrets:
        if os.environ['STACK_NAME'] in sec['SecretName']:
            secrets.append(sec)
    new_sec_ids = {sec['SecretID'] for sec in secrets}
    old_sec_ids = {sec['SecretID'] for sec in backupbot_secrets}
    if new_sec_ids.difference(old_sec_ids):
        logging.warning("Backupbot will restart to update the secrets")
    backupbot_service.update(secrets=secrets)

@cli.command()
def create():
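    """Run pre-hooks, back up the volumes and secrets of all enabled apps, then run post-hooks."""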
    pre_commands, post_commands, backup_paths, apps = get_backup_cmds()
    run_commands(pre_commands)
    backup_volumes(backup_paths, apps)
    run_commands(post_commands)

def get_backup_cmds():
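    """Collect the hook commands plus the volume and secret paths of every app
    that enables backups via the backupbot.backup label."""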
    client = docker.from_env()
    # Map swarm service names to their running containers, ignoring standalone containers
    containers = {c.labels['com.docker.swarm.service.name']: c
                  for c in client.containers.list()
                  if 'com.docker.swarm.service.name' in c.labels}
    backup_paths = set()
    backup_apps = set()
    pre_commands = {}
    post_commands = {}
    services = client.services.list()
    for s in services:
        labels = s.attrs['Spec']['Labels']
        # Label values are strings ("true"/"false"), so bool() alone is not enough
        if labels.get('backupbot.backup', '').lower() in ('true', '1', 'yes'):
            stack_name = labels['com.docker.stack.namespace']
            if SERVICE and SERVICE != stack_name:
                continue
            backup_apps.add(stack_name)
            # A labelled service may have no running task on this node
            container = containers.get(s.name)
            if not container:
                logging.warning(f"No running container for service {s.name}, skipping hooks")
            if container and (prehook := labels.get('backupbot.backup.pre-hook')):
                pre_commands[container] = prehook
            if container and (posthook := labels.get('backupbot.backup.post-hook')):
                post_commands[container] = posthook
            # Backup volumes
            backup_paths = backup_paths.union(
                Path(VOLUME_PATH).glob(f"{stack_name}_*"))
            # Backup secrets
            backup_paths = backup_paths.union(
                Path('/var/run/secrets').glob(f"{stack_name}_*"))
    return pre_commands, post_commands, list(backup_paths), list(backup_apps)

def run_commands(commands):
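    """Execute each hook command inside its container, wrapped in bash with pipefail."""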
    for container, command in commands.items():
        if not command:
            continue
        # Use bash's pipefail so a failing command inside a pipe does not fail silently
        command = command.removeprefix('bash -c \'').removeprefix('sh -c \'')
        command = command.removesuffix('\'')
        command = f"bash -c 'set -o pipefail;{command}'"
        logging.info(f"Running command in {container.name}:")
        logging.info(command)
        result = container.exec_run(command)
        if result.exit_code:
            logging.error(f"Failed to run command {command} in {container.name}: {result.output.decode()}")
        else:
            logging.info(result.output.decode())

def backup_volumes(backup_paths, apps, dry_run=False):
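    """Create a restic backup of the given paths, tagged with the app names."""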
    result = restic.backup(backup_paths, dry_run=dry_run, tags=apps)
    print(result)
    logging.info(result)

@cli.command()
@click.option('snapshot', '--snapshot', '-s', envvar='SNAPSHOT', default='latest')
@click.option('target', '--target', '-t', envvar='TARGET', default='/')
def restore(snapshot, target):
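    """Restore a snapshot to the target directory, restricted to the app's volumes when --host is set."""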
    # TODO: recommend shutting down the affected containers before restoring
    service_paths = VOLUME_PATH
    if SERVICE:
        service_paths = service_paths + f'{SERVICE}_*'
    print(f"Restoring snapshot {snapshot} of {service_paths} to {target}")
    result = restic.restore(snapshot_id=snapshot, include=service_paths, target_dir=target)
    logging.debug(result)


@cli.command()
def snapshots():
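    """Print time and ID of every snapshot, filtered by the SERVICE tag when --host is set."""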
    snapshots = restic.snapshots()
    for snap in snapshots:
        if not SERVICE or ((tags := snap.get('tags')) and SERVICE in tags):
            print(snap['time'], snap['id'])

@cli.command()
@click.option('snapshot', '--snapshot', '-s', envvar='SNAPSHOT', default='latest')
@click.option('path', '--path', '-p', envvar='INCLUDE_PATH')
def ls(snapshot, path):
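    """List the files of a snapshot, optionally restricted to a path."""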
    results = list_files(snapshot, path)
    for r in results:
        if r.get('path'):
            print(f"{r['ctime']}\t{r['path']}")

def list_files(snapshot, path):
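    """Return the parsed JSON listing of `restic ls <snapshot> [path]`."""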
    cmd = restic.cat.base_command() + ['ls', snapshot]
    if path:
        cmd.append(path)
    output = restic.internal.command_executor.execute(cmd)
    # restic's JSON output is newline-delimited: one JSON object per line
    results = [json.loads(line) for line in output.splitlines() if line]
    return results

@cli.command()
@click.option('snapshot', '--snapshot', '-s', envvar='SNAPSHOT', default='latest')
@click.option('path', '--path', '-p', envvar='INCLUDE_PATH')
def download(snapshot, path):
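    """Dump a file or directory (as a tar archive) from a snapshot to /tmp and print the file name."""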
    path = path.removesuffix('/')
    files = list_files(snapshot, path)
    # Guard against a path that does not exist in the snapshot
    matches = [f.get('type') for f in files if f.get('path') == path]
    if not matches:
        logging.error(f"Path {path} not found in snapshot {snapshot}")
        return
    filetype = matches[0]
    cmd = restic.cat.base_command() + ['dump', snapshot, path]
    output = subprocess.run(cmd, capture_output=True).stdout
    filename = "/tmp/" + Path(path).name
    if filetype == 'dir':
        filename = filename + ".tar"
    with open(filename, "wb") as file:
        file.write(output)
    print(filename)


if __name__ == '__main__':
    cli()