backup.py 9.4 KB

#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import pickle
import os.path
import docker
import tarfile
import time
import logging
from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseUpload, MediaFileUpload
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from docker.errors import NotFound, APIError
from datetime import datetime, timedelta
from io import BytesIO

BACKUP_AGE = 30
DOCKER_SOCK = 'unix://var/run/docker.sock'
POSTGRES_CONTAINER = 'postgres'
POSTGRES_USER = 'postgres'
ODOO_IMAGE = 'odoo/robert:8.0'
# ODOO_PATH = '/opt/odoo'
ODOO_PATH = '/home/robert'


def configure_logging():
    """Configure file logging under ./logs, using one log file per day."""
    root_path = './logs'
    if not os.path.exists(root_path):
        os.mkdir(root_path)
    log_name = 'backup_log_%s.txt' % datetime.now().strftime('%Y_%m_%d')
    logging.basicConfig(filename=os.path.join(root_path, log_name), filemode='w',
                        format='%(levelname)s - %(asctime)s - %(message)s',
                        datefmt='%Y/%m/%d %H:%M:%S', level=logging.INFO)


def log(message, level=logging.INFO):
    """Log a message at the given level."""
    logging.log(level, message)


def get_drive_service():
    """Return an authenticated Google Drive v3 service, refreshing or creating the OAuth token as needed."""
    log('Getting Google Drive credentials')
    creds = None
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', ['https://www.googleapis.com/auth/drive'])
            creds = flow.run_local_server()
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    return build('drive', 'v3', credentials=creds)


def delete_drive_old_folders(service=None):
    """Delete Drive folders older than BACKUP_AGE days."""
    if service is None:
        return False
    date_old = datetime.utcnow() - timedelta(days=BACKUP_AGE)
    date_old = date_old.strftime("%Y-%m-%dT00:00:00")
    query = "mimeType='application/vnd.google-apps.folder' and createdTime < '%s'" % date_old
    result = service.files().list(q=query, fields='files(id)').execute()
    files = result.get('files', [])
    if len(files) > 0:
        log('Deleting old backup folders')
        for item in files:
            service.files().delete(fileId=item.get('id')).execute()


def create_folder_name():
    """Return the name of today's backup folder (YYYY_MM_DD)."""
    return datetime.now().strftime('%Y_%m_%d')


def create_drive_folder(folder_name, service=None):
    """Return the id of the Drive folder with the given name, creating it if it does not exist."""
    if service is None:
        return None
    log('Getting remote folder')
    result = service.files().list(q="name='{}'".format(folder_name)).execute()
    items = result.get('files', [])
    if len(items) > 0:
        return items[0].get('id')
    folder_metadata = {
        'name': folder_name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    result = service.files().create(body=folder_metadata).execute()
    return result.get('id')


def get_docker_client():
    """Return a Docker client connected to the local Docker socket."""
    return docker.DockerClient(base_url=DOCKER_SOCK)


def get_pg_container(docker_client):
    """Return the postgres container, or None if it cannot be retrieved."""
    try:
        return docker_client.containers.get(POSTGRES_CONTAINER)
    except (NotFound, APIError):
        log('Failed to get the postgres container', logging.FATAL)
        return None


def list_postgres_databases(docker_client):
    """Return the database names in the postgres container, excluding system databases."""
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return []
    log('Getting database names')
    command = "psql -U %s -t -c 'SELECT datname FROM pg_database'" % POSTGRES_USER
    result = pg_container.exec_run(command)
    # exec_run returns the command's exit status; non-zero means psql failed
    if result.exit_code != 0:
        log('Failed to get database names')
        return []
    BLACK_LIST = ['postgres', 'template1', 'template0']
    output = result.output.decode('utf-8').split('\n')
    output = [name.strip() for name in output]
    return [name for name in output if name != '' and name not in BLACK_LIST]


def filter_databases_by_active_containers(databases, docker_client):
    """Keep only the databases whose name matches a running Odoo container."""
    try:
        containers = docker_client.containers.list(
            filters={'status': 'running', 'ancestor': ODOO_IMAGE})
        containers_name = [container.name for container in containers]
        log('Filtering databases with active systems')
        return [database for database in databases if database in containers_name]
    except APIError:
        log('Failed to filter databases with active systems')
        return []


def create_postgres_backup(database, docker_client):
    """Dump a database to a tar file inside the postgres container; return (success, file_name)."""
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return (False, None)
    log('Creating database backup: %s' % database)
    tmp_file = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, tmp_file)
    result = pg_container.exec_run(command)
    if result.exit_code != 0:
        log('Failed to create database backup: %s' % database)
        return (False, tmp_file)
    return (True, tmp_file)
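

# Restore sketch (the database name and dump path below are placeholders): a
# tar-format archive produced by "pg_dump -F tar" can be loaded back with
# pg_restore from inside the same container, for example:
#   docker exec postgres pg_restore -U postgres -c -d <database> /<backup_file>.tar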


def create_odoo_filestore_backup(folder_name):
    """Archive the Odoo filestore of the given database into ./tmp and return the tar's absolute path."""
    root_path = './tmp'
    if not os.path.exists(root_path):
        os.mkdir(root_path)
    log('Creating filestore backup: %s' % folder_name)
    tar_name = '%s_filestore_%s.tar' % (folder_name, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    tar_path = os.path.join(root_path, tar_name)
    filestore_path = os.path.join(ODOO_PATH, folder_name, 'files', 'filestore', folder_name)
    with tarfile.open(tar_path, mode='w') as tar:
        tar.add(filestore_path, os.path.basename(filestore_path))
    return os.path.abspath(tar_path)


def upload_postgres_to_drive(backup_file_name, backup_folder_id, docker_client, service):
    """Copy a database dump out of the postgres container and upload it to the Drive backup folder."""
    if service is None:
        return None
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return None
    log('Uploading database backup: %s' % backup_file_name)
    (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
    raw_data = BytesIO()
    for chunk in backup_file:
        raw_data.write(chunk)
    raw_data.seek(0)
    backup_metadata = {
        'name': backup_file_name,
        'parents': [backup_folder_id]
    }
    backup_media = MediaIoBaseUpload(raw_data, mimetype='application/tar', chunksize=2 * (1024 * 1024))
    result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
    raw_data.close()
    return result.get('id')


def upload_filestore_to_drive(backup_path, backup_folder_id, service):
    """Upload a filestore tar archive to the Drive backup folder."""
    if service is None:
        return None
    backup_name = os.path.basename(backup_path)
    log('Uploading filestore backup: %s' % backup_name)
    backup_metadata = {
        'name': backup_name,
        'parents': [backup_folder_id]
    }
    backup_media = MediaFileUpload(backup_path, mimetype='application/tar', chunksize=2 * (1024 * 1024))
    result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
    return result.get('id')


def delete_postgres_backup(backup_name, docker_client):
    """Remove the temporary dump file from the postgres container."""
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return False
    log('Deleting temporary database backup: %s' % backup_name)
    command = 'rm %s' % backup_name
    result = pg_container.exec_run(command)
    if result.exit_code != 0:
        log('Failed to delete temporary database backup: %s' % backup_name, level=logging.FATAL)
        return False
    return True


def delete_filestore_backup(backup_path):
    """Remove the temporary filestore archive from the local disk."""
    log('Deleting temporary filestore backup: %s' % os.path.basename(backup_path))
    os.remove(backup_path)


def run_backup():
    """Run the full backup cycle: databases and filestores for every active Odoo system."""
    configure_logging()
    start_time = datetime.now()
    log('Starting backup...')
    # 1. get the Drive connection
    service = get_drive_service()
    # 2. delete old backup folders
    delete_drive_old_folders(service)
    # 3. create the name for today's backup folder
    folder_name = create_folder_name()
    # 4. create the drive folder
    folder_id = create_drive_folder(folder_name, service)
    # 5. get the docker client
    docker_client = get_docker_client()
    # 6. list the databases
    databases = list_postgres_databases(docker_client)
    # 7. filter the databases by active containers
    databases = filter_databases_by_active_containers(databases, docker_client)
    # 8. back up each database and its filestore
    for db in databases:
        (backup_ok, backup_name) = create_postgres_backup(db, docker_client)
        if not backup_ok:
            if backup_name:
                delete_postgres_backup(backup_name, docker_client)
            continue
        upload_postgres_to_drive(backup_name, folder_id, docker_client, service)
        delete_postgres_backup(backup_name, docker_client)
        filestore_path = create_odoo_filestore_backup(db)
        upload_filestore_to_drive(filestore_path, folder_id, service)
        delete_filestore_backup(filestore_path)
        time.sleep(1)
    docker_client.close()
    end_time = datetime.now() - start_time
    log('Backup finished in %s' % str(end_time))


if __name__ == '__main__':
    run_backup()
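

# Scheduling sketch (the install path below is a placeholder): the script can be
# run once a day with cron, e.g.
#   0 2 * * * cd /opt/backup && /usr/bin/python backup.py
# Changing into the script directory matters because token.pickle,
# credentials.json, ./logs and ./tmp are all resolved relative to the working
# directory.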