@@ -1,15 +1,19 @@
-#! /usr/bin/python
+#!/usr/bin/python
 # -*- coding: utf-8 -*-
 import os
 import dropbox
 import docker
+from dropbox.files import WriteMode
 from dropbox.exceptions import ApiError, AuthError
 from docker.errors import NotFound, APIError
+from io import BytesIO
 import time
 
+BACKUP_AGE = 30
 DOCKER_SOCK = 'unix://var/run/docker.sock'
 POSTGRES_CONTAINER = 'postgres'
 POSTGRES_USER = 'postgres'
+ODOO_IMAGE = 'odoo/robert:8.0'
 
 '''
 '''
@@ -49,17 +53,11 @@ def create_dropbox_folder(folder_path, dbx=None):
         if len(result.matches) > 0:
             return False
 
-        folder_metadata = dbx.files_create_folder(folder_path)
-        print(folder_metadata)
+        dbx.files_create_folder(folder_path)
         return True
     except ApiError:
         return False
 
-'''
-'''
-def upload_to_dropbox(folder_path, file):
-    pass
-
 '''
 '''
 def get_docker_client():
@@ -97,6 +95,17 @@ def list_postgres_databases(docker_client):
 
     return output
 
+'''
+'''
+def filter_databases_by_active_containers(databases, docker_client):
+    try:
+        containers = docker_client.containers.list(filters={'status': 'running', 'ancestor': ODOO_IMAGE})
+        containers_name = [container.name for container in containers]
+
+        return [db for db in databases if db in containers_name]
+    except APIError:
+        return []
+
 '''
 '''
 def create_postgres_backup(database, docker_client):
@@ -115,6 +124,31 @@ def create_postgres_backup(database, docker_client):
 
     return (True, tmp_file)
 
+'''
+'''
+def upload_to_dropbox(backup_file_name, backup_path, docker_client, dbx):
+    pg_container = get_pg_container(docker_client)
+
+    if pg_container is None or pg_container.status == 'exited':
+        return False
+
+    try:
+        (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
+        raw_data = BytesIO()
+
+        for chunk in backup_file:
+            raw_data.write(chunk)
+
+        raw_data.seek(0)
+
+        remote_path = '%s/%s' % (backup_path, backup_file_name)
+        dbx.files_upload(raw_data.read(), remote_path, mode=WriteMode('overwrite'))
+        raw_data.close()
+
+        return True
+    except (APIError, ApiError):
+        return False
+
 '''
 '''
 def delete_backup_file(backup_name, docker_client):
@@ -143,7 +177,7 @@ def run_backup():
     folder_path = create_folder_path()
 
     # 3. create dropbox folder
-    create_ok = create_dropbox_folder(folder_path, dbx)
+    create_dropbox_folder(folder_path, dbx)
 
     # 4. get docker client
     docker_client = get_docker_client()
@@ -151,7 +185,10 @@
     # 5. list database
     databases = list_postgres_databases(docker_client)
 
-    # 6. backup databases
+    # 6. filter databases by active containers
+    databases = filter_databases_by_active_containers(databases, docker_client)
+
+    # 7. backup databases
     for db in databases:
         (backup_ok, backup_name) = create_postgres_backup(db, docker_client)
 
@@ -161,20 +198,11 @@
 
             continue
 
-        upload_to_dropbox(folder_path)
+        upload_to_dropbox(backup_name, folder_path, docker_client, dbx)
         delete_backup_file(backup_name, docker_client)
 
         time.sleep(1)
 
     docker_client.close()
 
-# run_backup()
-c = get_docker_client()
-
-(ok, name) = create_postgres_backup('test_01', c)
-print(ok)
-print(name)
-
-delete_backup_file(name, c)
-
-c.close()
+run_backup()