backup 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400
  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. from __future__ import print_function
  4. import os
  5. import pickle
  6. import os.path
  7. import docker
  8. import tarfile
  9. import time
  10. import logging
  11. from googleapiclient.discovery import build
  12. from googleapiclient.http import MediaIoBaseUpload, MediaFileUpload
  13. from google_auth_oauthlib.flow import InstalledAppFlow
  14. from google.auth.transport.requests import Request
  15. from docker.errors import NotFound, APIError
  16. from datetime import datetime, timedelta
  17. from io import FileIO, BytesIO
# Remote Drive backup folders older than this many days get deleted.
BACKUP_AGE = 30
# Local Docker daemon socket URL.
DOCKER_SOCK = 'unix://var/run/docker.sock'
# Name of the postgres container and the role used for psql/pg_dump.
POSTGRES_CONTAINER = 'postgres'
POSTGRES_USER = 'postgres'
# Image that identifies running Odoo containers; each such container's
# name is expected to match its database name (see the filter helper).
ODOO_IMAGE = 'odoo/robert:8.0'
# ODOO_PATH = '/opt/odoo'
ODOO_PATH = '/home/robert'
'''
'''
  27. def configure_logging():
  28. root_path = './logs'
  29. if not os.path.exists(root_path):
  30. os.mkdir(root_path)
  31. log_name = 'backup_log_%s.txt' % datetime.now().strftime('%Y_%m_%d')
  32. logging.basicConfig(filename=os.path.join(root_path, log_name), filemode='w', format='%(levelname)s - %(asctime)s - %(message)s', datefmt='%Y/%m/%d %H:%M:%S', level=logging.INFO)
  33. '''
  34. '''
  35. def log(message, level=logging.INFO):
  36. logging.log(level, message)
  37. '''
  38. '''
  39. def get_drive_service():
  40. log('Obteniendo credenciales')
  41. creds = None
  42. if os.path.exists('token.pickle'):
  43. with open('token.pickle', 'rb') as token:
  44. creds = pickle.load(token)
  45. if not creds or not creds.valid:
  46. if creds and creds.expired and creds.refresh_token:
  47. creds.refresh(Request())
  48. else:
  49. flow = InstalledAppFlow.from_client_secrets_file('credentials.json', ['https://www.googleapis.com/auth/drive'])
  50. creds = flow.run_local_server()
  51. with open('token.pickle', 'wb') as token:
  52. pickle.dump(creds, token)
  53. return build('drive', 'v3', credentials=creds)
  54. '''
  55. '''
  56. def delete_drive_old_folders(service=None):
  57. if service == None:
  58. return False
  59. date_old = datetime.utcnow() - timedelta(BACKUP_AGE)
  60. date_old = date_old.strftime("%Y-%m-%dT00:00:00")
  61. query = "mimeType='application/vnd.google-apps.folder' and createdTime < '%s'" % date_old
  62. result = service.files().list(q=query, fields='files(id)').execute()
  63. files = result.get('files', [])
  64. if len(files) > 0:
  65. log('Eliminando copias antiguas')
  66. for item in files:
  67. service.files().delete(fileId=item.get('id')).execute()
  68. '''
  69. '''
  70. def create_folder_name():
  71. return datetime.now().strftime('%Y_%m_%d')
  72. '''
  73. '''
  74. def create_drive_folder(folder_name, service=None):
  75. if service == None:
  76. return None
  77. log('Obteniedo carpeta remota')
  78. result = service.files().list(q="name='{}'".format(folder_name)).execute()
  79. items = result.get('files', [])
  80. if len(items) > 0:
  81. return items[0].get('id')
  82. folder_metadata = {
  83. 'name': folder_name,
  84. 'mimeType': 'application/vnd.google-apps.folder'
  85. }
  86. result = service.files().create(body=folder_metadata).execute()
  87. return result.get('id')
  88. '''
  89. '''
  90. def get_docker_client():
  91. return docker.DockerClient(base_url=DOCKER_SOCK)
  92. '''
  93. '''
  94. def get_pg_container(docker_client):
  95. try:
  96. pg_container = docker_client.containers.get(POSTGRES_CONTAINER)
  97. return pg_container
  98. except (NotFound, APIError):
  99. log('Error al obtener el cliente de docker', logging.FATAL)
  100. return None
  101. '''
  102. '''
  103. def list_postgres_databases(docker_client):
  104. pg_container = get_pg_container(docker_client)
  105. if pg_container is None or pg_container.status == 'exited':
  106. return []
  107. log('Obteniendo los nombres de base de datos')
  108. command = "psql -U %s -t -c 'SELECT datname FROM pg_database'" % POSTGRES_USER
  109. result = pg_container.exec_run(command)
  110. if result.exit_code == -1:
  111. log('Error al obtener los nombres de base de datos')
  112. return []
  113. output = result.output.split('\n')
  114. output = map(lambda x: x.strip(), output)
  115. output = filter(lambda x: x != '', output)
  116. BLACK_LIST = ['postgres', 'template1', 'template0']
  117. output = filter(lambda x: x not in BLACK_LIST, output)
  118. return output
  119. '''
  120. '''
  121. def filter_databases_by_active_containers(databases, docker_client):
  122. try:
  123. containers = docker_client.containers.list(filters={'status': 'running', 'ancestor': ODOO_IMAGE})
  124. containers_name = map(lambda x: x.name, containers)
  125. log('Filtrando base de datos con sistema activos')
  126. return filter(lambda x: x in containers_name, databases)
  127. except APIError:
  128. log('Error al filtrar las base de datos con sistemas activos')
  129. return []
  130. '''
  131. '''
  132. def create_tmp_path():
  133. tmp_path = './tmp'
  134. if not os.path.exists(tmp_path):
  135. os.mkdir(tmp_path)
  136. '''
  137. '''
  138. def create_postgres_backup(database, docker_client):
  139. pg_container = get_pg_container(docker_client)
  140. if pg_container is None or pg_container.status == 'exited':
  141. return (False, None)
  142. log('Creando copia de seguridad de la base de datos: %s' % database)
  143. tmp_file = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
  144. command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, tmp_file)
  145. result = pg_container.exec_run(command)
  146. if result.exit_code == -1:
  147. log('Error al crear la copia de seguridad de la base de datos: %s' % database)
  148. return (False, tmp_file)
  149. return (True, tmp_file)
  150. '''
  151. '''
def create_postgres_backup_v2(database, docker_client):
    """Dump *database* inside the postgres container and copy it to ./tmp.

    The dump is fetched out of the container via get_archive (which wraps
    it in an outer tar stream), unwrapped locally, and the in-container
    copy is removed.

    :param database: name of the database to dump.
    :param docker_client: Docker client used to reach the container.
    :return: absolute path of the extracted dump in ./tmp, or None when
             the container is unavailable or the dump reports failure.
    """
    pg_container = get_pg_container(docker_client)
    if pg_container is None or pg_container.status == 'exited':
        return None
    log('Creando copia de seguridad de la base de datos: %s' % database)
    backup_file_name = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
    command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, backup_file_name)
    # NOTE(review): with stream=True recent docker-py returns exit_code as
    # None, so the `== -1` failure branch below likely never triggers —
    # confirm against the installed docker-py version.
    (exit_code, output) = pg_container.exec_run(command, stream=True)
    for line in output:
        print(line, end='')
    if exit_code == -1:
        log('Error al crear la copia de seguridad de la base de datos: %s' % database)
        return None
    # get_archive returns a chunked tar stream wrapping the dump file.
    (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
    tmp_file_path = os.path.abspath(os.path.join('./tmp', backup_file_name))
    tmp_file_stream = FileIO(tmp_file_path, mode='w')
    for chunk in backup_file:
        tmp_file_stream.write(chunk)
    # NOTE(review): seeking immediately before close() is a no-op.
    tmp_file_stream.seek(0)
    tmp_file_stream.close()
    # Remove the dump from inside the container; the local copy remains.
    command = 'rm %s' % backup_file_name
    pg_container.exec_run(command)
    # Unwrap the outer tar: rename the wrapper, extract it (recreating
    # backup_file_name under ./tmp), then delete the wrapper. The returned
    # path therefore points at the freshly extracted member.
    tmp_tar = tmp_file_path.replace(os.path.basename(tmp_file_path), 'tmp.tar')
    os.rename(tmp_file_path, tmp_tar)
    tar = tarfile.open(tmp_tar, mode='r')
    tar.extractall('./tmp')
    os.remove(tmp_tar)
    return tmp_file_path
  180. '''
  181. '''
  182. def create_odoo_filestore_backup(folder_name):
  183. log('Creando copia de seguridad de los archivos adjuntos: %s' % folder_name)
  184. tar_name = '%s_filestore_%s.tar' % (folder_name, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
  185. tar_path = os.path.join('./tmp', tar_name)
  186. filestore_path = os.path.join(ODOO_PATH, folder_name, 'files', 'filestore', folder_name)
  187. with tarfile.open(tar_path, mode='w') as tar:
  188. tar.add(filestore_path, os.path.basename(filestore_path))
  189. tar.close()
  190. return os.path.abspath(tar_path)
  191. '''
  192. '''
  193. def upload_postgres_to_drive(backup_file_name, backup_folder_id, docket_client, service):
  194. if service == None:
  195. return None
  196. pg_container = get_pg_container(docket_client)
  197. if pg_container is None or pg_container.status == 'exited':
  198. return None
  199. log('Subiendo copia de seguridad de la base de datos: %s' % backup_file_name)
  200. (backup_file, _) = pg_container.get_archive('/%s' % backup_file_name)
  201. raw_data = BytesIO()
  202. for chunk in backup_file:
  203. raw_data.write(chunk)
  204. raw_data.close()
  205. raw_data.seek(0)
  206. backup_metadata = {
  207. 'name': backup_file_name,
  208. 'parents': [backup_folder_id]
  209. }
  210. backup_media = MediaIoBaseUpload(raw_data, mimetype='application/tar', chunksize=2*(1024*1024))
  211. try:
  212. result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
  213. return result.get('id')
  214. except Exception:
  215. log('Error al subir copia de seguridad de la base de datos: %s' % backup_file_name)
  216. return None
  217. '''
  218. '''
  219. def upload_backup_to_drive(backup_path, backup_folder_id, service):
  220. if service == None:
  221. return None
  222. backup_name = os.path.basename(backup_path)
  223. backup_metadata = {
  224. 'name': backup_name,
  225. 'parents': [backup_folder_id]
  226. }
  227. backup_media = MediaFileUpload(backup_path, mimetype='application/tar', chunksize=1024*1024, resumable=True)
  228. try:
  229. request = service.files().create(body=backup_metadata, media_body=backup_media)
  230. response = None
  231. while response is None:
  232. status, response = request.next_chunk()
  233. if status:
  234. log('Subiendo %d%% copia de seguridad: %s' % (int(status.progress() * 100), backup_name))
  235. print(response)
  236. return response
  237. except Exception:
  238. log('Error al subir copia de seguridad: %s' % backup_name)
  239. return None
  240. '''
  241. '''
  242. def delete_postgres_backup(backup_name, docker_client):
  243. pg_container = get_pg_container(docker_client)
  244. if pg_container is None or pg_container.status == 'exited':
  245. return False
  246. log('Eliminando copia temporal de la base de datos: %s' % backup_name)
  247. command = 'rm %s' % backup_name
  248. result = pg_container.exec_run(command)
  249. if result.exit_code == -1:
  250. log('Error al eliminar copia temporal de la base de datos: %s' % backup_name, level=logging.FATAL)
  251. return False
  252. return True
  253. '''
  254. '''
  255. def delete_backup_path(backup_path):
  256. log('Eliminando copia temporal: %s' % os.path.basename(backup_path))
  257. os.remove(backup_path)
  258. '''
  259. '''
def run_backup():
    """Run the full backup pipeline.

    Dumps every database with a running Odoo container, tars its
    filestore, uploads both to a dated Google Drive folder, and prunes
    remote folders older than BACKUP_AGE days.
    """
    configure_logging()
    start_time = datetime.now()
    log('Iniciando backup...')
    # 1. connect to Google Drive
    service = get_drive_service()
    # 2. prune stale remote folders
    delete_drive_old_folders(service)
    # 3. build today's folder name
    folder_name = create_folder_name()
    # 4. find or create the remote folder
    folder_id = create_drive_folder(folder_name, service)
    # 5. connect to the local Docker daemon
    docker_client = get_docker_client()
    # 6. list databases inside the postgres container
    databases = list_postgres_databases(docker_client)
    # 7. keep only databases with a running Odoo container
    databases = filter_databases_by_active_containers(databases, docker_client)
    # NOTE(review): len() requires a list — on Python 3 map/filter results
    # are lazy; confirm the two helpers above return real lists.
    log('Encontrados %d sistemas activos' % len(databases))
    # 8. back up each database plus its filestore
    create_tmp_path()
    for index, db in enumerate(databases):
        # NOTE(review): `index` is 0-based, so progress reads "0 de N".
        log('Creando copias de seguridad: %d de %d' % (index, len(databases)))
        pg_bkp_path = create_postgres_backup_v2(db, docker_client)
        if not pg_bkp_path:
            # Dump failed or container unavailable — skip this system.
            continue
        upload_backup_to_drive(pg_bkp_path, folder_id, service)
        delete_backup_path(pg_bkp_path)
        filestore_path = create_odoo_filestore_backup(db)
        upload_backup_to_drive(filestore_path, folder_id, service)
        delete_backup_path(filestore_path)
        # Brief pause between systems.
        time.sleep(1)
    docker_client.close()
    end_time = datetime.now() - start_time
    log('Backup finalizado en %s' % str(end_time))


# Module-level entry point: the backup runs whenever this script executes.
run_backup()