@@ -0,0 +1,250 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
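+'''Back up the PostgreSQL databases of the running Odoo containers.
+
+Dumps each database inside the postgres container, uploads the dumps into a
+dated Google Drive folder, and prunes Drive folders older than BACKUP_AGE
+days.'''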
+from __future__ import print_function
+
+import os
+import pickle
+import tarfile
+import time
+from datetime import datetime, timedelta
+from io import BytesIO
+
+import docker
+from docker.errors import NotFound, APIError
+from google.auth.transport.requests import Request
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+from googleapiclient.http import MediaIoBaseUpload
+
+BACKUP_AGE = 30  # days: Drive backup folders older than this get deleted
+DOCKER_SOCK = 'unix://var/run/docker.sock'
+POSTGRES_CONTAINER = 'postgres'
+POSTGRES_USER = 'postgres'
+ODOO_IMAGE = 'odoo/robert:8.0'
+# ODOO_PATH = '/opt/odoo'
+ODOO_PATH = '/home/robert'
+
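+# NOTE: credentials.json is the OAuth client-secrets file downloaded from the
+# Google API console. The first run opens a browser consent screen; the
+# resulting credentials are cached in token.pickle for later runs.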
+def get_drive_service():
+    '''Return an authenticated Drive v3 client, reusing cached credentials
+    from token.pickle when they are still valid.'''
+    creds = None
+
+    if os.path.exists('token.pickle'):
+        with open('token.pickle', 'rb') as token:
+            creds = pickle.load(token)
+
+    if not creds or not creds.valid:
+        if creds and creds.expired and creds.refresh_token:
+            creds.refresh(Request())
+        else:
+            flow = InstalledAppFlow.from_client_secrets_file(
+                'credentials.json', ['https://www.googleapis.com/auth/drive'])
+            creds = flow.run_local_server()
+
+        with open('token.pickle', 'wb') as token:
+            pickle.dump(creds, token)
+
+    return build('drive', 'v3', credentials=creds)
+
+def delete_drive_old_folders(service=None):
+    '''Delete every Drive folder created more than BACKUP_AGE days ago.'''
+    if service is None:
+        return False
+
+    date_old = datetime.utcnow() - timedelta(days=BACKUP_AGE)
+    date_old = date_old.strftime("%Y-%m-%dT00:00:00")
+
+    query = "mimeType='application/vnd.google-apps.folder' and createdTime < '%s'" % date_old
+    result = service.files().list(q=query, fields='files(id, name)').execute()
+
+    for item in result.get('files', []):
+        service.files().delete(fileId=item.get('id')).execute()
+
+def create_folder_name():
+    '''Return today's date as a folder name, formatted YYYY_MM_DD.'''
+    return datetime.now().strftime('%Y_%m_%d')
+
+def create_drive_folder(folder_name, service=None):
+    '''Return the id of the Drive folder named folder_name, creating the
+    folder first if it does not exist yet.'''
+    if service is None:
+        return None
+
+    result = service.files().list(q="name='{}'".format(folder_name)).execute()
+    items = result.get('files', [])
+
+    if len(items) > 0:
+        return items[0].get('id')
+
+    folder_metadata = {
+        'name': folder_name,
+        'mimeType': 'application/vnd.google-apps.folder'
+    }
+
+    result = service.files().create(body=folder_metadata).execute()
+    return result.get('id')
+
+def get_docker_client():
+    '''Connect to the local Docker daemon over its Unix socket.'''
+    return docker.DockerClient(base_url=DOCKER_SOCK)
+
+def get_pg_container(docker_client):
+    '''Return the postgres container, or None when it cannot be found or
+    the Docker API call fails.'''
+    try:
+        return docker_client.containers.get(POSTGRES_CONTAINER)
+    except (NotFound, APIError):
+        return None
+
+def list_postgres_databases(docker_client):
+    '''List the databases in the postgres container, excluding the system
+    databases.'''
+    pg_container = get_pg_container(docker_client)
+
+    if pg_container is None or pg_container.status == 'exited':
+        return []
+
+    command = "psql -U %s -t -c 'SELECT datname FROM pg_database'" % POSTGRES_USER
+    result = pg_container.exec_run(command)
+
+    # exec_run reports the command's exit code; anything non-zero is a failure
+    if result.exit_code != 0:
+        return []
+
+    # result.output is bytes; decode before splitting into lines
+    output = result.output.decode('utf-8').split('\n')
+    output = [line.strip() for line in output if line.strip() != '']
+
+    BLACK_LIST = ['postgres', 'template1', 'template0']
+    return [db for db in output if db not in BLACK_LIST]
+
+def filter_databases_by_active_containers(databases, docker_client):
+    '''Keep only the databases whose name matches a running Odoo container.'''
+    try:
+        containers = docker_client.containers.list(
+            filters={'status': 'running', 'ancestor': ODOO_IMAGE})
+        # materialise the names: a bare map() would be exhausted after the
+        # first membership test under Python 3
+        containers_name = [container.name for container in containers]
+
+        return [db for db in databases if db in containers_name]
+    except APIError:
+        return []
+
+def create_postgres_backup(database, docker_client):
+    '''Dump one database to a tar file inside the postgres container and
+    return a (success, file name) pair.'''
+    pg_container = get_pg_container(docker_client)
+
+    if pg_container is None or pg_container.status == 'exited':
+        return (False, None)
+
+    tmp_file = '%s_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
+    # -F tar: tar output format, -C: include CREATE DATABASE, -b: include
+    # large objects, -c: emit clean (drop) statements before recreating
+    command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, tmp_file)
+
+    result = pg_container.exec_run(command)
+
+    if result.exit_code != 0:
+        # the dump failed, but a partial file may have been written
+        return (False, tmp_file)
+
+    return (True, tmp_file)
+
+def create_odoo_filestore_backup(folder_name):
+    '''Archive the Odoo filestore of the given database into a tar file.'''
+    filestore_path = os.path.join(ODOO_PATH, folder_name, 'files', 'filestore', folder_name)
+
+    # write the archive next to the filestore directory instead of inside it,
+    # so earlier archives never get swept up in later backups
+    archive_path = os.path.join(os.path.dirname(filestore_path), folder_name + '.tar')
+
+    with tarfile.open(archive_path, mode='w') as filestore_tar:
+        filestore_tar.add(filestore_path, arcname=folder_name)
+
+def upload_to_drive(backup_file_name, backup_folder_id, docker_client, service):
+    '''Stream a backup file out of the postgres container and upload it
+    into the given Drive folder. Returns the new file id.'''
+    if service is None:
+        return None
+
+    pg_container = get_pg_container(docker_client)
+
+    if pg_container is None or pg_container.status == 'exited':
+        return None
+
+    # get_archive returns a (stream, stat) pair; the stream is a tar archive
+    # wrapping the requested path, buffered here in memory
+    (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
+    raw_data = BytesIO()
+
+    for chunk in backup_file:
+        raw_data.write(chunk)
+
+    raw_data.seek(0)
+
+    backup_metadata = {
+        'name': backup_file_name,
+        'parents': [backup_folder_id]
+    }
+    # resumable=True is required for the 5 MiB chunk size to take effect
+    backup_media = MediaIoBaseUpload(raw_data, mimetype='application/x-tar',
+                                     chunksize=5 * 1024 * 1024, resumable=True)
+
+    result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
+    raw_data.close()
+
+    return result.get('id')
+
+def delete_backup_file(backup_name, docker_client):
+    '''Remove a dump file from the postgres container once it has been
+    uploaded (or its creation failed).'''
+    pg_container = get_pg_container(docker_client)
+
+    if pg_container is None or pg_container.status == 'exited':
+        return False
+
+    command = 'rm %s' % backup_name
+    result = pg_container.exec_run(command)
+
+    return result.exit_code == 0
+
+def run_backup():
+    '''Back up every active Odoo database into a dated Drive folder.'''
+    # 1. get connection
+    service = get_drive_service()
+
+    # 2. delete old folders
+    delete_drive_old_folders(service)
+
+    # 3. create folder name
+    folder_name = create_folder_name()
+
+    # 4. create drive folder
+    folder_id = create_drive_folder(folder_name, service)
+
+    # 5. get docker client
+    docker_client = get_docker_client()
+
+    # 6. list databases
+    databases = list_postgres_databases(docker_client)
+
+    # 7. filter databases by active containers
+    databases = filter_databases_by_active_containers(databases, docker_client)
+
+    # 8. backup databases
+    for db in databases:
+        (backup_ok, backup_name) = create_postgres_backup(db, docker_client)
+
+        if not backup_ok:
+            # a failed dump can still leave a partial file behind
+            if backup_name:
+                delete_backup_file(backup_name, docker_client)
+
+            continue
+
+        upload_to_drive(backup_name, folder_id, docker_client, service)
+        delete_backup_file(backup_name, docker_client)
+
+        time.sleep(1)
+
+    docker_client.close()
+
+if __name__ == '__main__':
+    # run_backup()
+    create_odoo_filestore_backup('ferresur')
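+
+# Illustrative cron entry for a nightly run (the script path is hypothetical):
+#   0 2 * * * /usr/bin/python /path/to/this_script.py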