diff --git a/AIPscan/Aggregator/tasks.py b/AIPscan/Aggregator/tasks.py
index 2b1e2120..c1261211 100644
--- a/AIPscan/Aggregator/tasks.py
+++ b/AIPscan/Aggregator/tasks.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import json
 import os
+import shutil
 
 import requests
 from celery.utils.log import get_task_logger
@@ -185,8 +186,10 @@ def workflow_coordinator(
     )
     total_replicas = len([package for package in all_packages if package.is_replica()])
 
-    summary = "aips: '{}'; sips: '{}'; dips: '{}'; deleted: '{}'; replicated: '{}'".format(
-        total_aips, total_sips, total_dips, total_deleted_aips, total_replicas
+    summary = (
+        "aips: '{}'; sips: '{}'; dips: '{}'; deleted: '{}'; replicated: '{}'".format(
+            total_aips, total_sips, total_dips, total_deleted_aips, total_replicas
+        )
     )
 
     logger.info("%s", summary)
@@ -272,6 +275,16 @@ def package_lists_request(self, apiUrl, timestamp, packages_directory):
     }
 
 
+@celery.task()
+def fetch_job_file_cleanup(fetch_job_id):
+    obj = FetchJob.query.filter_by(id=fetch_job_id).first()
+
+    if os.path.isdir(obj.download_directory):
+        shutil.rmtree(obj.download_directory)
+
+    logger.info("Cleaned up after fetch job {}".format(fetch_job_id))
+
+
 @celery.task()
 def get_mets(
     package_uuid,
diff --git a/AIPscan/Aggregator/views.py b/AIPscan/Aggregator/views.py
index 4f45d3ba..2abf56d3 100644
--- a/AIPscan/Aggregator/views.py
+++ b/AIPscan/Aggregator/views.py
@@ -315,6 +315,9 @@ def get_mets_task_status(coordinatorid):
         downloadStart = _format_date(start)
         obj.download_end = downloadEnd
         db.session.commit()
+
+        tasks.fetch_job_file_cleanup.delay(fetchJobId)
+
         response = {"state": "COMPLETED"}
         flash("Fetch Job {} completed".format(downloadStart))
         return jsonify(response)