Add sql dump in backup script
This commit is contained in:
parent
b335a0c9c0
commit
46668163b7
73
backup.py
73
backup.py
@ -15,7 +15,8 @@ from datetime import datetime
|
|||||||
|
|
||||||
|
|
||||||
# --- configuration constants -------------------------------------------------
# Compose file describing the services and volumes to back up.
COMPOSE_PATH = Path('docker-compose.yml')
# Destination directory for tar archives of named docker volumes.
BACKUP_VOL_PATH = Path('./backup/docker_named_volumes')
# Destination directory for SQL dump files.
BACKUP_SQL_PATH = Path('./backup/docker_sql_dumps')
# Prefix prepended to volume names ('docker_' — presumably the compose
# project name; confirm against the compose project configuration).
VOLUME_PREFIX = 'docker_'
@ -53,6 +54,9 @@ def build_services_graph(services):
|
|||||||
def group_services(services, graph):
|
def group_services(services, graph):
|
||||||
grouped_services = {}
|
grouped_services = {}
|
||||||
for service in services:
|
for service in services:
|
||||||
|
# Do not include if SQL
|
||||||
|
if is_sql(services[service]):
|
||||||
|
continue
|
||||||
root = graph.find(service)
|
root = graph.find(service)
|
||||||
if root not in grouped_services:
|
if root not in grouped_services:
|
||||||
grouped_services[root] = {'services': []}
|
grouped_services[root] = {'services': []}
|
||||||
@ -70,27 +74,28 @@ def group_volumes(services, volumes, services_group):
|
|||||||
|
|
||||||
return services_group
|
return services_group
|
||||||
|
|
||||||
def backup(volume):
|
|
||||||
|
def is_sql(service):
    """Return True if *service* runs a PostgreSQL image.

    Looks for the substring 'postgres' in the service's `image` value.
    Uses dict.get so services declared with `build:` instead of `image:`
    are treated as non-SQL rather than raising KeyError.
    """
    return 'postgres' in service.get('image', '')
|
|
||||||
|
def get_date_string():
    """Return today's local date formatted as YYYY-MM-DD."""
    return datetime.now().strftime("%Y-%m-%d")
||||||
|
|
||||||
|
def backup_named_volume(volume):
    """Archive the named docker volume *volume* into BACKUP_VOL_PATH.

    Spawns a throwaway ubuntu container that mounts the volume at /data
    and the host backup directory at /bkp, then tars /data into a dated
    archive file.
    """
    archive_name = f'{get_date_string()}_{volume}.tar'
    print(f'backup volume {volume} to {BACKUP_VOL_PATH}/{archive_name}')

    command = (
        f'docker run --rm'
        f' --volume {VOLUME_PREFIX}{volume}:/data'
        f' --volume {BACKUP_VOL_PATH.resolve()}:/bkp'
        f' ubuntu tar -cf /bkp/{archive_name} -C /data .'
    )
    subprocess.run(command.split())
|
|
||||||
|
|
||||||
def run_docker_compose(cmd):
    """Run `docker compose <cmd>` as a subprocess; *cmd* is a whitespace-separated string."""
    argv = ['docker', 'compose', *cmd.split()]
    subprocess.run(argv)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
with COMPOSE_PATH.open() as cf:
|
|
||||||
compose = yaml.safe_load(cf)
|
|
||||||
|
|
||||||
services = compose['services']
|
|
||||||
volumes = compose['volumes']
|
|
||||||
|
|
||||||
|
|
||||||
|
def backup_named_volumes(services, volumes):
|
||||||
services_graph = build_services_graph(services)
|
services_graph = build_services_graph(services)
|
||||||
services_group = group_services(services, services_graph)
|
services_group = group_services(services, services_graph)
|
||||||
services_group = group_volumes(services, volumes, services_group)
|
services_group = group_volumes(services, volumes, services_group)
|
||||||
@ -106,5 +111,41 @@ if __name__ == '__main__':
|
|||||||
|
|
||||||
run_docker_compose(f'stop {" ".join(group["services"])}')
|
run_docker_compose(f'stop {" ".join(group["services"])}')
|
||||||
for volume in group['volumes']:
|
for volume in group['volumes']:
|
||||||
backup(volume)
|
backup_named_volume(volume)
|
||||||
run_docker_compose(f'start {" ".join(group["services"])}')
|
run_docker_compose(f'start {" ".join(group["services"])}')
|
||||||
|
|
||||||
|
|
||||||
|
def backup_sql_dumps(services, volumes):
    """Dump the database of every SQL (postgres) service in *services*.

    *volumes* is accepted for signature parity with backup_named_volumes
    but is not used here.
    """
    sql_services = (
        (name, svc) for name, svc in services.items() if is_sql(svc)
    )
    for service_name, service in sql_services:
        backup_sql_dump(service_name, service)
|
|
||||||
|
def backup_sql_dump(service_name, service):
    """Dump the PostgreSQL database of *service* into BACKUP_SQL_PATH.

    Runs pg_dump inside the running service container via
    `docker compose exec` and streams its stdout into a dated .sql file
    on the host.

    Raises:
        subprocess.CalledProcessError: if pg_dump exits non-zero (check=True).
        KeyError: if POSTGRES_USER / POSTGRES_DB are absent from the
            service environment.
    """
    date_string = get_date_string()
    dump_name = f'{date_string}_{service_name}.sql'
    dump_path = BACKUP_SQL_PATH / dump_name

    # parents=True: also create the intermediate ./backup directory if it
    # does not exist yet; the original mkdir(exist_ok=True) would raise
    # FileNotFoundError in that case.
    dump_path.parent.mkdir(parents=True, exist_ok=True)

    # NOTE(review): assumes `environment` is a mapping; compose also allows
    # the list form ("KEY=value") — confirm against the compose file.
    user = service['environment']['POSTGRES_USER']
    db = service['environment']['POSTGRES_DB']

    print(f'Service {service_name} dump database to {dump_path}')

    # Write in binary mode so pg_dump's output bytes land on disk unaltered.
    with dump_path.open('wb') as f:
        subprocess.run(f'docker compose exec -T {service_name} pg_dump -U {user} {db}'.split(),
                       check=True,
                       stdout=f)
|
|
||||||
|
if __name__ == '__main__':
    # Load the compose file to discover services and their volumes.
    with COMPOSE_PATH.open() as cf:
        compose = yaml.safe_load(cf)

    services = compose['services']
    volumes = compose['volumes']

    # First archive the named volumes, then dump the SQL databases.
    backup_named_volumes(services, volumes)
    backup_sql_dumps(services, volumes)
|
|||||||
Loading…
Reference in New Issue
Block a user