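'''
Backs up Odoo deployments to Google Drive: for every database whose Odoo
container is running, the script dumps the database with pg_dump inside the
PostgreSQL container, tars the matching filestore under ODOO_PATH, uploads
both archives to a dated Drive folder, and prunes Drive folders older than
BACKUP_AGE days. It assumes a local Docker daemon and Google Drive API
credentials (credentials.json / token.pickle) in the working directory.
'''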
from __future__ import print_function
import os
import pickle
import docker
import tarfile
import time
import logging
from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseUpload, MediaFileUpload
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from docker.errors import NotFound, APIError
from datetime import datetime, timedelta
from io import BytesIO

# Maximum age (in days) of backup folders kept on Google Drive.
BACKUP_AGE = 30
# Local Docker daemon socket.
DOCKER_SOCK = 'unix://var/run/docker.sock'
# PostgreSQL container name and role used for dumps.
POSTGRES_CONTAINER = 'postgres'
POSTGRES_USER = 'postgres'
# Image and base path of the Odoo deployments to back up.
ODOO_IMAGE = 'odoo/robert:8.0'
ODOO_PATH = '/home/robert'

def configure_logging():
    '''Configure file logging; one log file per day under ./logs.'''
    root_path = './logs'
    if not os.path.exists(root_path):
        os.mkdir(root_path)
    log_name = 'backup_log_%s.txt' % datetime.now().strftime('%Y_%m_%d')
    logging.basicConfig(filename=os.path.join(root_path, log_name), filemode='w',
                        format='%(levelname)s - %(asctime)s - %(message)s',
                        datefmt='%Y/%m/%d %H:%M:%S', level=logging.INFO)

def log(message, level=logging.INFO):
    '''Write a message to the backup log at the given level.'''
    logging.log(level, message)

def get_drive_service():
    '''Authenticate against Google Drive and return a v3 service client.'''
    log('Getting credentials')
    creds = None
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', ['https://www.googleapis.com/auth/drive'])
            creds = flow.run_local_server()
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    return build('drive', 'v3', credentials=creds)

def delete_drive_old_folders(service=None):
    '''Delete Drive folders older than BACKUP_AGE days.'''
    if service is None:
        return False
    date_old = datetime.utcnow() - timedelta(days=BACKUP_AGE)
    date_old = date_old.strftime("%Y-%m-%dT00:00:00")
    query = "mimeType='application/vnd.google-apps.folder' and createdTime < '%s'" % date_old
    result = service.files().list(q=query, fields='files(id)').execute()
    files = result.get('files', [])
    if len(files) > 0:
        log('Deleting old backups')
        for item in files:
            service.files().delete(fileId=item.get('id')).execute()

def create_folder_name():
    '''Build the remote folder name from the current date.'''
    return datetime.now().strftime('%Y_%m_%d')

def create_drive_folder(folder_name, service=None):
    '''Return the id of the Drive backup folder, creating it if it does not exist.'''
    if service is None:
        return None
    log('Getting remote folder')
    result = service.files().list(q="name='{}'".format(folder_name)).execute()
    items = result.get('files', [])
    if len(items) > 0:
        return items[0].get('id')
    folder_metadata = {
        'name': folder_name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    result = service.files().create(body=folder_metadata).execute()
    return result.get('id')

def get_docker_client():
    '''Return a Docker client connected to the local daemon socket.'''
    return docker.DockerClient(base_url=DOCKER_SOCK)

def get_pg_container(docker_client):
    '''Return the PostgreSQL container, or None if it cannot be found.'''
    try:
        return docker_client.containers.get(POSTGRES_CONTAINER)
    except (NotFound, APIError):
        log('Could not get the PostgreSQL container', logging.FATAL)
        return None

def list_postgres_databases(docker_client):
    '''Return the names of the databases to back up, excluding system databases.'''
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return []
    log('Getting database names')
    command = "psql -U %s -t -c 'SELECT datname FROM pg_database'" % POSTGRES_USER
    result = pg_container.exec_run(command)
    if result.exit_code != 0:
        log('Error getting database names')
        return []
    # exec_run returns bytes; decode before splitting into one name per line.
    output = result.output.decode('utf-8').split('\n')
    output = [name.strip() for name in output]
    output = [name for name in output if name != '']
    BLACK_LIST = ['postgres', 'template1', 'template0']
    return [name for name in output if name not in BLACK_LIST]

def filter_databases_by_active_containers(databases, docker_client):
    '''Keep only the databases whose Odoo container is currently running.'''
    try:
        containers = docker_client.containers.list(filters={'status': 'running', 'ancestor': ODOO_IMAGE})
        containers_name = [container.name for container in containers]
        log('Filtering databases with active systems')
        return [db for db in databases if db in containers_name]
    except APIError:
        log('Error filtering databases with active systems')
        return []

def create_postgres_backup(database, docker_client):
    '''Run pg_dump inside the PostgreSQL container; return (success, dump file name).'''
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return (False, None)
    log('Creating backup of database: %s' % database)
    tmp_file = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, tmp_file)
    result = pg_container.exec_run(command)
    if result.exit_code != 0:
        log('Error creating backup of database: %s' % database)
        return (False, tmp_file)
    return (True, tmp_file)

def create_odoo_filestore_backup(folder_name):
    '''Tar the Odoo filestore of the given database and return the archive path.'''
    root_path = './tmp'
    if not os.path.exists(root_path):
        os.mkdir(root_path)
    log('Creating backup of the attachments: %s' % folder_name)
    tar_name = '%s_filestore_%s.tar' % (folder_name, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    tar_path = os.path.join(root_path, tar_name)
    filestore_path = os.path.join(ODOO_PATH, folder_name, 'files', 'filestore', folder_name)
    with tarfile.open(tar_path, mode='w') as tar:
        tar.add(filestore_path, os.path.basename(filestore_path))
    return os.path.abspath(tar_path)

def upload_postgres_to_drive(backup_file_name, backup_folder_id, docker_client, service):
    '''Stream the dump file out of the container and upload it to Drive.'''
    if service is None:
        return None
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return None
    log('Uploading database backup: %s' % backup_file_name)
    # get_archive returns the file as a tar stream plus its stat info.
    (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
    raw_data = BytesIO()
    for chunk in backup_file:
        raw_data.write(chunk)
    raw_data.seek(0)
    backup_metadata = {
        'name': backup_file_name,
        'parents': [backup_folder_id]
    }
    backup_media = MediaIoBaseUpload(raw_data, mimetype='application/tar', chunksize=2 * (1024 * 1024))
    result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
    raw_data.close()
    return result.get('id')

def upload_filestore_to_drive(backup_path, backup_folder_id, service):
    '''Upload the filestore archive to the Drive backup folder.'''
    if service is None:
        return None
    backup_name = os.path.basename(backup_path)
    log('Uploading attachments backup: %s' % backup_name)
    backup_metadata = {
        'name': backup_name,
        'parents': [backup_folder_id]
    }
    backup_media = MediaFileUpload(backup_path, mimetype='application/tar', chunksize=2 * (1024 * 1024))
    result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
    return result.get('id')

def delete_postgres_backup(backup_name, docker_client):
    '''Remove the temporary dump file from the PostgreSQL container.'''
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return False
    log('Deleting temporary database backup: %s' % backup_name)
    command = 'rm %s' % backup_name
    result = pg_container.exec_run(command)
    if result.exit_code != 0:
        log('Error deleting temporary database backup: %s' % backup_name, level=logging.FATAL)
        return False
    return True

def delete_filestore_backup(backup_path):
    '''Remove the temporary filestore archive from the local ./tmp directory.'''
    log('Deleting temporary attachments backup: %s' % os.path.basename(backup_path))
    os.remove(backup_path)

def run_backup():
    '''Back up every active Odoo database and its filestore to Google Drive.'''
    configure_logging()
    start_time = datetime.now()
    log('Starting backup...')

    service = get_drive_service()
    delete_drive_old_folders(service)
    folder_name = create_folder_name()
    folder_id = create_drive_folder(folder_name, service)

    docker_client = get_docker_client()
    databases = list_postgres_databases(docker_client)
    databases = filter_databases_by_active_containers(databases, docker_client)

    for index, db in enumerate(databases):
        log('Processing %d of %d' % (index + 1, len(databases)))
        (backup_ok, backup_name) = create_postgres_backup(db, docker_client)
        if not backup_ok:
            if backup_name:
                delete_postgres_backup(backup_name, docker_client)
            continue
        upload_postgres_to_drive(backup_name, folder_id, docker_client, service)
        delete_postgres_backup(backup_name, docker_client)
        filestore_path = create_odoo_filestore_backup(db)
        upload_filestore_to_drive(filestore_path, folder_id, service)
        delete_filestore_backup(filestore_path)
        time.sleep(1)

    docker_client.close()
    end_time = datetime.now() - start_time
    log('Backup finished in %s' % str(end_time))


if __name__ == '__main__':
    run_backup()