|
@@ -1,400 +0,0 @@
|
|
|
-#!/usr/bin/python
|
|
|
-# -*- coding: utf-8 -*-
|
|
|
-from __future__ import print_function
|
|
|
-import os
|
|
|
-import pickle
|
|
|
-import os.path
|
|
|
-import docker
|
|
|
-import tarfile
|
|
|
-import time
|
|
|
-import logging
|
|
|
-from googleapiclient.discovery import build
|
|
|
-from googleapiclient.http import MediaIoBaseUpload, MediaFileUpload
|
|
|
-from google_auth_oauthlib.flow import InstalledAppFlow
|
|
|
-from google.auth.transport.requests import Request
|
|
|
-from docker.errors import NotFound, APIError
|
|
|
-from datetime import datetime, timedelta
|
|
|
-from io import FileIO, BytesIO
|
|
|
-
|
|
|
-BACKUP_AGE = 30
|
|
|
-DOCKER_SOCK = 'unix://var/run/docker.sock'
|
|
|
-POSTGRES_CONTAINER = 'postgres'
|
|
|
-POSTGRES_USER = 'postgres'
|
|
|
-ODOO_IMAGE = 'odoo/robert:8.0'
|
|
|
-# ODOO_PATH = '/opt/odoo'
|
|
|
-ODOO_PATH = '/home/robert'
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def configure_logging():
    """Set up file-based logging under ./logs, one file per day.

    The log file is opened in 'w' mode, so it is truncated on every run;
    only the latest execution of a given day is kept.
    """
    log_dir = './logs'

    if not os.path.exists(log_dir):
        os.mkdir(log_dir)

    file_name = 'backup_log_%s.txt' % datetime.now().strftime('%Y_%m_%d')
    logging.basicConfig(
        filename=os.path.join(log_dir, file_name),
        filemode='w',
        format='%(levelname)s - %(asctime)s - %(message)s',
        datefmt='%Y/%m/%d %H:%M:%S',
        level=logging.INFO,
    )
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def log(message, level=logging.INFO):
    """Record *message* in the configured log at the given *level*."""
    logging.log(level, message)
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def get_drive_service():
    """Return an authenticated Google Drive v3 service client.

    Credentials are cached in ``token.pickle``; when absent or invalid they
    are refreshed, or re-obtained through the installed-app OAuth flow
    (which needs ``credentials.json`` next to this script), and re-cached.
    """
    log('Obteniendo credenciales')

    creds = None
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token_file:
            creds = pickle.load(token_file)

    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            # Expired but refreshable: just refresh in place.
            creds.refresh(Request())
        else:
            # No usable credentials: run the interactive OAuth flow.
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json',
                ['https://www.googleapis.com/auth/drive'])
            creds = flow.run_local_server()

        # Persist the (possibly new) credentials for the next run.
        with open('token.pickle', 'wb') as token_file:
            pickle.dump(creds, token_file)

    return build('drive', 'v3', credentials=creds)
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def delete_drive_old_folders(service=None):
    """Delete Drive folders created more than BACKUP_AGE days ago.

    :param service: authenticated Drive v3 service; when None nothing is done.
    :return: False when no service is available, None otherwise.
    """
    if service is None:  # fix: identity comparison instead of `== None`
        return False

    # fix: name the timedelta argument — the bare positional was `days`
    cutoff = datetime.utcnow() - timedelta(days=BACKUP_AGE)
    cutoff = cutoff.strftime("%Y-%m-%dT00:00:00")

    query = "mimeType='application/vnd.google-apps.folder' and createdTime < '%s'" % cutoff
    result = service.files().list(q=query, fields='files(id)').execute()

    files = result.get('files', [])

    if files:
        log('Eliminando copias antiguas')

        for item in files:
            service.files().delete(fileId=item.get('id')).execute()
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def create_folder_name():
    """Return today's date as YYYY_MM_DD — the remote backup folder name."""
    today = datetime.now()
    return today.strftime('%Y_%m_%d')
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def create_drive_folder(folder_name, service=None):
    """Return the id of the Drive folder *folder_name*, creating it if needed.

    :param folder_name: remote folder name (normally today's date).
    :param service: authenticated Drive v3 service; when None nothing is done.
    :return: the folder id, or None when no service is available.
    """
    if service is None:  # fix: identity comparison instead of `== None`
        return None

    log('Obteniedo carpeta remota')

    result = service.files().list(q="name='{}'".format(folder_name)).execute()
    items = result.get('files', [])

    # Reuse an existing folder with the same name.
    if items:
        return items[0].get('id')

    folder_metadata = {
        'name': folder_name,
        'mimeType': 'application/vnd.google-apps.folder'
    }

    result = service.files().create(body=folder_metadata).execute()
    return result.get('id')
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def get_docker_client():
    """Build a Docker client bound to the local Unix socket (DOCKER_SOCK)."""
    client = docker.DockerClient(base_url=DOCKER_SOCK)
    return client
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def get_pg_container(docker_client):
    """Look up the postgres container by its well-known name.

    :return: the container object, or None when it does not exist or the
             Docker API call fails (logged as FATAL).
    """
    try:
        return docker_client.containers.get(POSTGRES_CONTAINER)
    except (NotFound, APIError):
        log('Error al obtener el cliente de docker', logging.FATAL)
        return None
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def list_postgres_databases(docker_client):
    """Return the list of database names found in the postgres container.

    System databases (postgres, template0, template1) are excluded.

    :return: list of database names; [] when the container is unavailable
             or psql fails.
    """
    pg_container = get_pg_container(docker_client)

    if pg_container is None or pg_container.status == 'exited':
        return []

    log('Obteniendo los nombres de base de datos')

    command = "psql -U %s -t -c 'SELECT datname FROM pg_database'" % POSTGRES_USER
    result = pg_container.exec_run(command)

    # fix: exec_run reports the command's real exit status; any non-zero
    # value means psql failed (the original compared against -1, which
    # exec_run never returns).
    if result.exit_code != 0:
        log('Error al obtener los nombres de base de datos')
        return []

    # fix: exec_run yields bytes on Python 3 — decode before splitting.
    output = result.output
    if isinstance(output, bytes):
        output = output.decode('utf-8')

    names = [line.strip() for line in output.split('\n')]
    names = [name for name in names if name]

    # fix: return a real list — map/filter are lazy iterators on Python 3
    # and the caller takes len() of this result.
    BLACK_LIST = ['postgres', 'template1', 'template0']
    return [name for name in names if name not in BLACK_LIST]
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def filter_databases_by_active_containers(databases, docker_client):
    """Keep only the databases whose same-named odoo container is running.

    :param databases: iterable of database names.
    :param docker_client: docker client used to list running containers.
    :return: list of database names; [] on Docker API errors.
    """
    try:
        containers = docker_client.containers.list(filters={'status': 'running', 'ancestor': ODOO_IMAGE})
        # fix: materialize the names as a set — O(1) membership tests, and
        # on Python 3 a lazy map() would be exhausted after one pass.
        containers_name = set(container.name for container in containers)

        log('Filtrando base de datos con sistema activos')
        # fix: return a real list; the caller takes len() of this result.
        return [db for db in databases if db in containers_name]
    except APIError:
        log('Error al filtrar las base de datos con sistemas activos')
        return []
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def create_tmp_path():
    """Ensure the local ./tmp working directory exists."""
    path = './tmp'

    if not os.path.exists(path):
        os.mkdir(path)
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def create_postgres_backup(database, docker_client):
    """Dump *database* to a tar file inside the postgres container.

    :return: (success, file_name) — file_name is the dump created at the
             container's root, or None when the container is unavailable.
    """
    pg_container = get_pg_container(docker_client)

    if pg_container is None or pg_container.status == 'exited':
        return (False, None)

    log('Creando copia de seguridad de la base de datos: %s' % database)

    tmp_file = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, tmp_file)

    result = pg_container.exec_run(command)

    # fix: pg_dump signals failure with any non-zero exit status; the
    # original compared against -1, which exec_run never returns.
    if result.exit_code != 0:
        log('Error al crear la copia de seguridad de la base de datos: %s' % database)
        return (False, tmp_file)

    return (True, tmp_file)
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def create_postgres_backup_v2(database, docker_client):
    """Dump *database* inside the postgres container and copy it into ./tmp.

    The dump is produced with pg_dump inside the container, pulled out via
    the Docker archive API, unpacked into ./tmp, and removed from the
    container afterwards.

    :return: absolute path of the local dump file, or None on failure.
    """
    pg_container = get_pg_container(docker_client)

    if pg_container is None or pg_container.status == 'exited':
        return None

    log('Creando copia de seguridad de la base de datos: %s' % database)

    backup_file_name = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, backup_file_name)

    # fix: with stream=True exec_run returns exit_code=None, so the error
    # check below could never fire. Run synchronously and check the real
    # exit status instead.
    result = pg_container.exec_run(command)
    print(result.output, end='')

    if result.exit_code != 0:
        log('Error al crear la copia de seguridad de la base de datos: %s' % database)
        return None

    # get_archive wraps the dump in a tar stream; persist it locally first.
    (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)

    tmp_file_path = os.path.abspath(os.path.join('./tmp', backup_file_name))
    # fix: context manager guarantees the stream is closed (the original
    # also did a pointless seek(0) right before close()).
    with FileIO(tmp_file_path, mode='w') as tmp_file_stream:
        for chunk in backup_file:
            tmp_file_stream.write(chunk)

    # Remove the dump from the container now that a local copy exists.
    pg_container.exec_run('rm %s' % backup_file_name)

    # Unpack the archive wrapper so ./tmp holds the plain dump file.
    tmp_tar = tmp_file_path.replace(os.path.basename(tmp_file_path), 'tmp.tar')
    os.rename(tmp_file_path, tmp_tar)

    with tarfile.open(tmp_tar, mode='r') as tar:
        tar.extractall('./tmp')

    os.remove(tmp_tar)
    return tmp_file_path
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def create_odoo_filestore_backup(folder_name):
    """Archive the odoo filestore of *folder_name* into ./tmp.

    :param folder_name: database/system name; its filestore lives under
                        ODOO_PATH/<name>/files/filestore/<name>.
    :return: absolute path of the created tar file.
    """
    log('Creando copia de seguridad de los archivos adjuntos: %s' % folder_name)

    tar_name = '%s_filestore_%s.tar' % (folder_name, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    tar_path = os.path.join('./tmp', tar_name)
    filestore_path = os.path.join(ODOO_PATH, folder_name, 'files', 'filestore', folder_name)

    # fix: dropped the redundant tar.close() — the with-block already
    # closes the archive; also name the arcname argument for clarity.
    with tarfile.open(tar_path, mode='w') as tar:
        tar.add(filestore_path, arcname=os.path.basename(filestore_path))

    return os.path.abspath(tar_path)
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def upload_postgres_to_drive(backup_file_name, backup_folder_id, docket_client, service):
    """Upload a dump that still lives inside the postgres container to Drive.

    :param backup_file_name: dump file name at the container's root.
    :param backup_folder_id: Drive folder id to upload into.
    :param docket_client: docker client used to reach the container.
    :param service: authenticated Drive v3 service; when None nothing is done.
    :return: the Drive file id, or None on failure.
    """
    if service is None:  # fix: identity comparison instead of `== None`
        return None

    pg_container = get_pg_container(docket_client)

    if pg_container is None or pg_container.status == 'exited':
        return None

    log('Subiendo copia de seguridad de la base de datos: %s' % backup_file_name)

    (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
    raw_data = BytesIO()

    for chunk in backup_file:
        raw_data.write(chunk)

    # fix: the original closed the buffer and then seeked it, which raises
    # ValueError on a closed BytesIO. Rewind it and keep it open for the
    # upload; close it only once the request has finished.
    raw_data.seek(0)

    backup_metadata = {
        'name': backup_file_name,
        'parents': [backup_folder_id]
    }
    backup_media = MediaIoBaseUpload(raw_data, mimetype='application/tar', chunksize=2*(1024*1024))

    try:
        result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
        return result.get('id')
    except Exception:
        log('Error al subir copia de seguridad de la base de datos: %s' % backup_file_name)
        return None
    finally:
        raw_data.close()
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def upload_backup_to_drive(backup_path, backup_folder_id, service):
    """Upload a local backup file to the given Drive folder (resumable).

    Progress is logged per uploaded chunk.

    :param backup_path: local path of the file to upload.
    :param backup_folder_id: Drive folder id to upload into.
    :param service: authenticated Drive v3 service; when None nothing is done.
    :return: the Drive API response dict, or None on failure / no service.
    """
    if service is None:  # fix: identity comparison instead of `== None`
        return None

    backup_name = os.path.basename(backup_path)

    backup_metadata = {
        'name': backup_name,
        'parents': [backup_folder_id]
    }
    backup_media = MediaFileUpload(backup_path, mimetype='application/tar', chunksize=1024*1024, resumable=True)

    try:
        request = service.files().create(body=backup_metadata, media_body=backup_media)
        response = None

        # Drive resumable upload: push chunks until a response arrives.
        while response is None:
            status, response = request.next_chunk()

            if status:
                log('Subiendo %d%% copia de seguridad: %s' % (int(status.progress() * 100), backup_name))

        # fix: log the result instead of the stray debug print()
        log('Copia de seguridad subida: %s' % backup_name)
        return response
    except Exception:
        log('Error al subir copia de seguridad: %s' % backup_name)
        return None
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def delete_postgres_backup(backup_name, docker_client):
    """Remove a dump file from the postgres container's filesystem.

    :return: True on success, False when the container is unavailable or
             the rm command fails.
    """
    pg_container = get_pg_container(docker_client)

    if pg_container is None or pg_container.status == 'exited':
        return False

    log('Eliminando copia temporal de la base de datos: %s' % backup_name)

    result = pg_container.exec_run('rm %s' % backup_name)

    # fix: rm signals failure with any non-zero exit status; the original
    # compared against -1, which exec_run never returns.
    if result.exit_code != 0:
        log('Error al eliminar copia temporal de la base de datos: %s' % backup_name, level=logging.FATAL)
        return False

    return True
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def delete_backup_path(backup_path):
    """Remove a temporary local backup file."""
    name = os.path.basename(backup_path)
    log('Eliminando copia temporal: %s' % name)
    os.remove(backup_path)
|
|
|
-
|
|
|
-'''
|
|
|
-'''
|
|
|
def run_backup():
    """Run the full backup cycle.

    For every odoo system with a running container: dump its database and
    its filestore, upload both to a dated Google Drive folder, and purge
    Drive folders older than BACKUP_AGE days.
    """
    configure_logging()

    start_time = datetime.now()
    log('Iniciando backup...')

    # 1. authenticate against Google Drive
    service = get_drive_service()

    # 2. drop remote folders older than BACKUP_AGE days
    delete_drive_old_folders(service)

    # 3. build today's folder name (fix: step numbering was duplicated)
    folder_name = create_folder_name()

    # 4. get (or create) the remote folder
    folder_id = create_drive_folder(folder_name, service)

    # 5. connect to the local docker daemon
    docker_client = get_docker_client()

    # 6. list candidate databases
    databases = list_postgres_databases(docker_client)

    # 7. keep only databases with a running odoo container
    # fix: force a list so len() and iteration below work even if the
    # helper returns a lazy filter object on Python 3
    databases = list(filter_databases_by_active_containers(databases, docker_client))
    log('Encontrados %d sistemas activos' % len(databases))

    # 8. back up each database and its filestore
    create_tmp_path()

    # fix: start the progress counter at 1 — the original logged "0 de N"
    for index, db in enumerate(databases, 1):
        log('Creando copias de seguridad: %d de %d' % (index, len(databases)))

        pg_bkp_path = create_postgres_backup_v2(db, docker_client)

        if not pg_bkp_path:
            continue

        upload_backup_to_drive(pg_bkp_path, folder_id, service)
        delete_backup_path(pg_bkp_path)

        filestore_path = create_odoo_filestore_backup(db)
        upload_backup_to_drive(filestore_path, folder_id, service)
        delete_backup_path(filestore_path)

        time.sleep(1)

    docker_client.close()

    elapsed = datetime.now() - start_time
    log('Backup finalizado en %s' % str(elapsed))


run_backup()
|