
[ADD] service and timer

robert · 5 years ago · commit 9f53fa1626
3 changed files with 40 additions and 10 deletions:
  1. backup (+25 -10)
  2. backup.service (+6 -0)
  3. backup.timer (+9 -0)

backup.py → backup (+25 -10)

@@ -205,9 +205,12 @@ def create_postgres_backup_v2(database, docker_client):
     backup_file_name = '%s_database_%s.tar' % (database, datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
     command = 'pg_dump -U %s -d %s -F tar -C -b -c -f %s' % (POSTGRES_USER, database, backup_file_name)
     
-    result = pg_container.exec_run(command)
+    (exit_code, output) = pg_container.exec_run(command, stream=True)
 
-    if result.exit_code == -1:
+    for line in output:
+        print(line, end='')
+
+    if exit_code == -1:
         log('Error al crear la copia de seguridad de la base de datos: %s' % database)
         return None
 
@@ -222,6 +225,9 @@ def create_postgres_backup_v2(database, docker_client):
     tmp_file_stream.seek(0)
     tmp_file_stream.close()
 
+    command = 'rm %s' % backup_file_name
+    pg_container.exec_run(command)
+
     tmp_tar = tmp_file_path.replace(os.path.basename(tmp_file_path), 'tmp.tar')
     os.rename(tmp_file_path, tmp_tar)
 
@@ -288,17 +294,25 @@ def upload_backup_to_drive(backup_path, backup_folder_id, service):
         return None
 
     backup_name = os.path.basename(backup_path)
-    log('Subiendo copia de seguridad: %s' % backup_name)
     
     backup_metadata = {
         'name': backup_name,
         'parents': [backup_folder_id]
     }
-    backup_media = MediaFileUpload(backup_path, mimetype='application/tar', chunksize=2*(1024*1024))
+    backup_media = MediaFileUpload(backup_path, mimetype='application/tar', chunksize=1024*1024, resumable=True)
 
     try:
-        result = service.files().create(body=backup_metadata, media_body=backup_media).execute()
-        return result.get('id')
+        request = service.files().create(body=backup_metadata, media_body=backup_media)
+        response = None
+
+        while response is None:
+            status, response = request.next_chunk()
+
+            if status:
+                log('Subiendo %d%% copia de seguridad: %s' % (int(status.progress() * 100), backup_name))
+
+        print(response)
+        return response
     except Exception:
         log('Error al subir copia de seguridad: %s' % backup_name)
         return None
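For context on the resumable upload loop above: with google-api-python-client, MediaFileUpload(..., resumable=True) combined with request.next_chunk() uploads the file in chunks and exposes progress through status.progress(); the final response is a plain dict, so the Drive file id comes from response.get('id'). A minimal, self-contained sketch assuming service-account credentials in key.json and placeholder file and folder names:

    from google.oauth2 import service_account
    from googleapiclient.discovery import build
    from googleapiclient.http import MediaFileUpload

    # Hypothetical credentials, path and folder id, for illustration only.
    creds = service_account.Credentials.from_service_account_file(
        'key.json', scopes=['https://www.googleapis.com/auth/drive'])
    service = build('drive', 'v3', credentials=creds)

    media = MediaFileUpload('backup.tar', mimetype='application/tar',
                            chunksize=1024 * 1024, resumable=True)
    metadata = {'name': 'backup.tar', 'parents': ['FOLDER_ID']}

    request = service.files().create(body=metadata, media_body=media, fields='id')
    response = None
    while response is None:
        # next_chunk() returns (status, None) until the final chunk is accepted.
        status, response = request.next_chunk()
        if status:
            print('uploaded %d%%' % int(status.progress() * 100))

    print('file id:', response.get('id'))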
@@ -325,7 +339,7 @@ def delete_postgres_backup(backup_name, docker_client):
 '''
 '''
 def delete_backup_path(backup_path):
-    log('Eliminando copia temporal de los archivos adjuntos: %s' % os.path.basename(backup_path))
+    log('Eliminando copia temporal: %s' % os.path.basename(backup_path))
     os.remove(backup_path)
     
 '''
@@ -356,20 +370,21 @@ def run_backup():
 
     # 7. filter databases by active containers
     databases = filter_databases_by_active_containers(databases, docker_client)
+    log('Encontrados %d sistemas activos' % len(databases))
 
     # 8. backup databases
     create_tmp_path()
 
     for index, db in enumerate(databases):
-        log('Creado copias: %d de %d' % (index, len(databases)))
+        log('Creando copias de seguridad: %d de %d' % (index, len(databases)))
         
         pg_bkp_path = create_postgres_backup_v2(db, docker_client)
 
         if not pg_bkp_path:
             continue
         
-        upload_backup_to_drive(pg_bkp_path, folder_id, docker_client, service)
-        delete_backup_path(pg_bkp_path, docker_client)
+        upload_backup_to_drive(pg_bkp_path, folder_id, service)
+        delete_backup_path(pg_bkp_path)
 
         filestore_path = create_odoo_filestore_backup(db)
         upload_backup_to_drive(filestore_path, folder_id, service)

backup.service (+6 -0)

@@ -0,0 +1,6 @@
+[Unit]
+Description=Backup Service
+
+[Service]
+Type=oneshot
+ExecStart=/opt/google_drive/backup

backup.timer (+9 -0)

@@ -0,0 +1,9 @@
+[Unit]
+Description=Backup Timer
+
+[Timer]
+OnCalendar=*-*-* 10:00:00
+Persistent=true
+
+[Install]
+WantedBy=timers.target
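Assuming both unit files end up under /etc/systemd/system (the install location is not part of this commit), the timer is the unit to enable: every day at 10:00 it triggers the matching oneshot backup.service. Typical commands to activate and check it:

    systemctl daemon-reload
    systemctl enable --now backup.timer
    systemctl list-timers backup.timer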