Commit e419ed6e authored by Sascha Herzinger

Fixed critical issue in janitor: when the data directory is missing, only delete Redis 'data:*' entries whose Celery task has finished, instead of dropping every tracked entry

parent 0bc2db0d
@@ -9,21 +9,24 @@ def janitor():
     file system while Fractalis is running.
     """
     data_dir = os.path.join(app.config['FRACTALIS_TMP_DIR'], 'data')
+    tracked_ids = [key.split(':')[1] for key in redis.scan_iter('data:*')]
     if not os.path.exists(data_dir):
-        for key in redis.scan_iter('data:*'):
-            redis.delete(key)
+        for task_id in tracked_ids:
+            async_result = celery.AsyncResult(task_id)
+            if async_result.state == 'SUCCESS':
+                redis.delete('data:{}'.format(task_id))
         return
     cached_files = [f for f in os.listdir(data_dir)
                     if os.path.isfile(os.path.join(data_dir, f))]
-    tracked_files = [key.split(':')[1] for key in redis.scan_iter('data:*')]
     # clean cached files
     for cached_file in cached_files:
-        if cached_file not in tracked_files:
+        if cached_file not in tracked_ids:
             sync.remove_file(os.path.join(data_dir, cached_file))
     # clean tracked files
-    for task_id in tracked_files:
+    for task_id in tracked_ids:
         path = os.path.join(data_dir, task_id)
         async_result = celery.AsyncResult(task_id)
         if async_result.state == 'SUCCESS' and not os.path.exists(path):
......
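The behavioral change in this hunk is in the branch that runs when the data directory is missing: before, every tracked 'data:*' key was dropped from Redis; after, a key is only dropped once its Celery task has reached SUCCESS, so entries for still-running tasks survive. The sketch below illustrates that guard in isolation. It is not Fractalis code; redis_client, celery_app, and the 'data:<task_id>' key layout are assumptions taken from the hunk above.

import os

from celery import Celery
from redis import StrictRedis

# Stand-ins for the module-level objects the real janitor imports.
redis_client = StrictRedis(host='localhost', port=6379, db=0)
celery_app = Celery('fractalis',
                    broker='redis://localhost:6379/0',
                    backend='redis://localhost:6379/0')


def drop_finished_entries(data_dir: str) -> None:
    """If the cache directory is gone, forget only those tracked entries
    whose Celery task has already finished; keep in-flight ones tracked."""
    tracked_ids = [key.decode().split(':')[1]
                   for key in redis_client.scan_iter('data:*')]
    if os.path.exists(data_dir):
        return  # directory is present; the full janitor handles that case
    for task_id in tracked_ids:
        async_result = celery_app.AsyncResult(task_id)
        if async_result.state == 'SUCCESS':
            # Safe to forget: the task finished, only its cached file is gone.
            redis_client.delete('data:{}'.format(task_id))
        # Entries for unfinished tasks are kept so their results are not lost.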
@@ -79,8 +79,7 @@ def get_all_data() -> Tuple[Response, int]:
     for task_id in session['data_tasks']:
         data_state = get_data_state_for_task_id(task_id, wait)
         if data_state is None:
-            warning = "Data state with task_id '{}' expired. " \
-                      "Discarding...".format(task_id)
+            warning = "Data state with task_id '{}' expired.".format(task_id)
             logger.warning(warning)
             continue
         # remove internal information from response
......