Commit 439f8105 authored by Sascha Herzinger

Improved caching behavior

parent 9420065c
Pipeline #5016 failed with stages
in 16 minutes and 32 seconds
......@@ -21,6 +21,10 @@ try:
default_config = False
except RuntimeError:
pass
# Sessions and stored Celery task results should live exactly as long as
# the cached data they reference, so both lifetimes mirror
# FRACTALIS_DATA_LIFETIME.
app.config['PERMANENT_SESSION_LIFETIME'] = (
    app.config['FRACTALIS_DATA_LIFETIME'])
app.config['CELERY_TASK_RESULT_EXPIRES'] = (
    app.config['FRACTALIS_DATA_LIFETIME'])
# setup logging
with open(app.config['FRACTALIS_LOG_CONFIG'], 'rt') as f:
......
......@@ -212,6 +212,12 @@ class AnalyticTask(Task, metaclass=abc.ABCMeta):
result = re.sub(r'NaN', 'null', result)
return result
def after_return(self, status, retval, task_id, args, kwargs, einfo):
    """Celery ``after_return`` hook, called once the task has finished.

    Attaches a TTL to the redis entry that holds this task's result so
    that analysis results expire instead of accumulating and consuming
    too much redis memory.
    """
    result_key = 'celery-task-meta-%s' % task_id
    redis.expire(name=result_key,
                 time=app.config['FRACTALIS_RESULT_LIFETIME'])
def run(self, session_data_tasks: List[str],
args: dict, decrypt: bool) -> str:
"""This is called by the celery worker. This method calls other helper
......
......@@ -13,21 +13,21 @@ REDIS_PORT = '6379'
# Flask session cookie behavior
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SECURE = False
SESSION_REFRESH_EACH_REQUEST = True
# Default session lifetime; the application overrides this at startup to
# match FRACTALIS_DATA_LIFETIME.
PERMANENT_SESSION_LIFETIME = timedelta(days=1)
# Celery
BROKER_URL = 'amqp://guest:guest@localhost:5672//'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
# Per-task time limits in seconds: soft (20 min) and hard (30 min).
CELERYD_TASK_SOFT_TIME_LIMIT = 60 * 20
CELERYD_TASK_TIME_LIMIT = 60 * 30
# Default result expiry; the application overrides this at startup to
# match FRACTALIS_DATA_LIFETIME.
CELERY_TASK_RESULT_EXPIRES = timedelta(days=10)
# Do not let Celery replace the root logger configuration.
CELERYD_HIJACK_ROOT_LOGGER = False
# Fractalis
# Location of cache and temporary files
FRACTALIS_TMP_DIR = os.path.abspath(os.path.join(os.sep, 'tmp', 'fractalis'))
# How long to store files in the cache
FRACTALIS_CACHE_EXP = timedelta(days=10)
# NOTE(review): FRACTALIS_DATA_LIFETIME appears to supersede
# FRACTALIS_CACHE_EXP as the data/cache TTL elsewhere in this change —
# confirm before removing the old constant.
FRACTALIS_DATA_LIFETIME = timedelta(days=6)
# How long to keep analysis results (beware of high RAM usage)
FRACTALIS_RESULT_LIFETIME = timedelta(seconds=30)
# Should the Cache be encrypted? This might impact performance for little gain!
FRACTALIS_ENCRYPT_CACHE = False
# Location of your log configuration file.
......
......@@ -126,7 +126,7 @@ class ETL(Task, metaclass=abc.ABCMeta):
data_state['meta']['features'] = features
redis.setex(name='data:{}'.format(self.request.id),
value=json.dumps(data_state),
time=app.config['FRACTALIS_CACHE_EXP'])
time=app.config['FRACTALIS_DATA_LIFETIME'])
@staticmethod
def secure_load(data_frame: DataFrame, file_path: str) -> None:
......
......@@ -94,7 +94,7 @@ class ETLHandler(metaclass=abc.ABCMeta):
}
redis.setex(name='data:{}'.format(task_id),
value=json.dumps(data_state),
time=app.config['FRACTALIS_CACHE_EXP'])
time=app.config['FRACTALIS_DATA_LIFETIME'])
def descriptor_to_hash(self, descriptor: dict) -> int:
"""Compute hash for the given descriptor. Used to identify duplicates.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment