Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in
Toggle navigation
Menu
Open sidebar
Jochem Bijlard
fractalis
Commits
90b612aa
Commit
90b612aa
authored
Mar 02, 2018
by
Sascha Herzinger
Browse files
Fix broken `import manage` in sdist builds by moving the janitor task into fractalis/cleanup.py
parent
744563a6
Changes
6
Hide whitespace changes
Inline
Side-by-side
MANIFEST.in
View file @
90b612aa
include fractalis/logging.yaml
include manage.py
fractalis/cleanup.py
0 → 100644
View file @
90b612aa
import
os
from
fractalis
import
app
,
redis
,
sync
,
celery
@celery.task
def janitor():
    """Clean up redis and the file system while Fractalis is running.

    Deletes every file in the temporary directory that is no longer
    referenced by a ``data:*`` key in redis.  Ideally this task is
    triggered periodically by a systemd service.
    """
    tmp_dir = app.config['FRACTALIS_TMP_DIR']
    # Keys look like 'data:<task_id>'.  Split only on the first ':' so a
    # task id that itself contains ':' is not truncated, and collect into
    # a set so the membership test below is O(1) instead of O(n).
    # NOTE(review): assumes the redis client decodes responses to str
    # (decode_responses=True) — confirm against the fractalis redis setup.
    tracked_files = {key.split(':', 1)[1]
                     for key in redis.scan_iter('data:*')}
    cached_files = [f for f in os.listdir(tmp_dir)
                    if os.path.isfile(os.path.join(tmp_dir, f))]
    for cached_file in cached_files:
        if cached_file not in tracked_files:
            sync.remove_file(os.path.join(tmp_dir, cached_file))
fractalis/data/etlhandler.py
View file @
90b612aa
...
...
@@ -7,7 +7,7 @@ import logging
from
uuid
import
uuid4
from
typing
import
List
,
Union
import
m
an
age
from
fractalis.cleanup
import
j
an
itor
from
fractalis
import
app
,
redis
,
celery
from
fractalis.data.etl
import
ETL
...
...
@@ -136,7 +136,7 @@ class ETLHandler(metaclass=abc.ABCMeta):
task_ids
=
self
.
find_duplicates
(
data_tasks
,
descriptor
)
for
task_id
in
task_ids
:
redis
.
delete
(
'data:{}'
.
format
(
task_id
))
manage
.
janitor
.
delay
()
janitor
.
delay
()
def
find_duplicate_task_id
(
self
,
data_tasks
:
List
[
str
],
descriptor
:
dict
)
->
Union
[
str
,
None
]:
...
...
manage.py
View file @
90b612aa
import
os
from
flask_script
import
Manager
from
fractalis
import
app
,
redis
,
sync
,
celery
import
fractalis.cleanup
from
fractalis
import
app
manager
=
Manager
(
app
)
@
celery
.
task
@
manager
.
command
def
janitor
():
"""Ideally this is maintained by a systemd service to cleanup redis and the
file system while Fractalis is running.
"""
tmp_dir
=
app
.
config
[
'FRACTALIS_TMP_DIR'
]
tracked_files
=
[
key
.
split
(
':'
)[
1
]
for
key
in
redis
.
scan_iter
(
'data:*'
)]
cached_files
=
[
f
for
f
in
os
.
listdir
(
tmp_dir
)
if
os
.
path
.
isfile
(
os
.
path
.
join
(
tmp_dir
,
f
))]
for
cached_file
in
cached_files
:
if
cached_file
not
in
tracked_files
:
sync
.
remove_file
(
os
.
path
.
join
(
tmp_dir
,
cached_file
))
fractalis
.
cleanup
.
janitor
.
delay
()
if
__name__
==
"__main__"
:
...
...
tests/unit/etls/
transmart/
test_etlhandler.py
→
tests/unit/etls/test_etlhandler.py
View file @
90b612aa
File moved
tests/unit/test_
manage
.py
→
tests/unit/test_
cleanup
.py
View file @
90b612aa
...
...
@@ -3,7 +3,7 @@
import
os
from
pathlib
import
Path
import
m
an
age
from
fractalis.cleanup
import
j
an
itor
from
fractalis
import
app
,
redis
...
...
@@ -14,7 +14,7 @@ class TestManage:
tmp_dir
=
app
.
config
[
'FRACTALIS_TMP_DIR'
]
os
.
makedirs
(
tmp_dir
,
exist_ok
=
True
)
Path
(
os
.
path
.
join
(
tmp_dir
,
'abc'
)).
touch
()
manage
.
janitor
()
janitor
()
assert
not
os
.
path
.
exists
(
os
.
path
.
join
(
tmp_dir
,
'abc'
))
def
test_janitor_does_not_remove_tracked_files
(
self
):
...
...
@@ -22,5 +22,5 @@ class TestManage:
os
.
makedirs
(
tmp_dir
,
exist_ok
=
True
)
Path
(
os
.
path
.
join
(
tmp_dir
,
'abc'
)).
touch
()
redis
.
set
(
'data:abc'
,
''
)
manage
.
janitor
()
janitor
()
assert
os
.
path
.
exists
(
os
.
path
.
join
(
tmp_dir
,
'abc'
))
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment