Commit fc25056a authored by Patrick May's avatar Patrick May
Browse files

new gaia branch

parents
Pipeline #15269 failed with stages
in 0 seconds
# Ignore all dotfiles except the two tracked ones below.
.*
!.gitignore
!.gitlab-ci.yml
# Editor backup files.
*~
# User-specific local configuration.
conf/userconfig.imp.json
# Generated pipeline outputs, databases and caches.
output/*
db
__pycache__
# Legacy / scratch directories kept out of version control.
gitold/
databases/*
tmp/*
IMPnew/*
IMPAnna/*
# Locally built tools and vendored dependencies.
bin/*
lib/*
docker/*
dependencies/*
# Pipeline stages, executed in this order; "manual" jobs run only on demand.
stages:
  - dag
  - init
  - preprocessing
  - assembly
  - analysis
  - binning
  - manual

# Manually triggered job: build the reference databases into a
# per-pipeline output directory (uses the project dir as the db target).
databases:
  stage: manual
  when: manual
  script:
    - pip3.4 install --editable . --user
    - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d $CI_PROJECT_DIR/$CI_BUILD_ID init

# Install impy (this project) before every job.
before_script:
  - pip3.4 install --editable . --user
# INIT
# Run the Python unit tests.
i-cl:
  stage: init
  script:
    - pytest

# Initialise the databases with an additional filter index.
i-with-another-filter:
  stage: init
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db init --index $CI_PROJECT_DIR/test/small.fa
# DAG
# See if the snakemake graph is resolving to the end: every job runs the
# pipeline with `snakemake -n` (dry run) so only the DAG is built.
dag-mgmt-default:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID -x "snakemake -n"

# Integrative run with the MaxBin binner.
dag-mgmt-maxbin:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -b maxbin -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID -x "snakemake -n"

# Integrative run with the IDBA assembler.
dag-mgmt-idba:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -a idba -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID -x "snakemake -n"

# No-filtering via the dedicated config file.
dag-mgmt-nf:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID -x "snakemake -n"

# No-filtering via the --no-filtering command-line flag.
dag-mgmt-nf2:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID -x "snakemake -n" --no-filtering

# Single-omics metagenomics (MG) variants.
dag-mg-default:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n"

dag-mg-nf:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n"

dag-mg-nf2:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n" --no-filtering

# Single-omics metatranscriptomics (MT) variants.
dag-mt-default:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db run -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n"

dag-mt-nf:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db run -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n"

dag-mt-nf2:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db run -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n" --no-filtering

# Binning disabled (-b no) variants.
dag-mgmt-no-binning:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -b no run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID -x "snakemake -n"

dag-mt-no-binning:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -b no run -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n"

dag-mg-no-binning:
  stage: dag
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -b no run -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics -x "snakemake -n"
# PREPROCESSING
# Run only the preprocessing step (--single-step) on the small test reads.
p-mgmt-default:
  stage: preprocessing
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db preprocessing -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step

p-mgmt-nf:
  stage: preprocessing
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db preprocessing -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step

p-mg-default:
  stage: preprocessing
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db preprocessing -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics --single-step

p-mg-nf:
  stage: preprocessing
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db preprocessing -m $CI_PROJECT_DIR/test/MG.R1.small.fq -m $CI_PROJECT_DIR/test/MG.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics --single-step

p-mt-default:
  stage: preprocessing
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db preprocessing -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics --single-step

p-mt-nf:
  stage: preprocessing
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/no_filtering.conf.json -d /mnt/data/db preprocessing -t $CI_PROJECT_DIR/test/MT.R1.small.fq -t $CI_PROJECT_DIR/test/MT.R2.small.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-omics --single-step
# Assembly
# IDBA assemblies are slow, so they are manual-only jobs.
a-mgmt-idba:
  stage: manual
  when: manual
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -a idba assembly -m /mnt/data/input/assembly-megahit/mg.r1.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.r2.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.se.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.r1.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.r2.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.se.preprocessed.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step

a-mg-idba:
  stage: manual
  when: manual
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -a idba assembly -m /mnt/data/input/assembly-megahit/mg.r1.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.r2.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.se.preprocessed.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics

a-mt-idba:
  stage: manual
  when: manual
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -a idba assembly -t /mnt/data/input/assembly-megahit/mt.r1.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.r2.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.se.preprocessed.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics

# MEGAHIT assemblies run in the regular assembly stage.
a-mgmt-megahit:
  stage: assembly
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -a megahit assembly -m /mnt/data/input/assembly-megahit/mg.r1.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.r2.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.se.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.r1.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.r2.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.se.preprocessed.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step

a-mg-megahit:
  stage: assembly
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -a megahit assembly -m /mnt/data/input/assembly-megahit/mg.r1.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.r2.preprocessed.fq -m /mnt/data/input/assembly-megahit/mg.se.preprocessed.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics

a-mt-megahit:
  stage: assembly
  script:
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -a megahit assembly -t /mnt/data/input/assembly-megahit/mt.r1.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.r2.preprocessed.fq -t /mnt/data/input/assembly-megahit/mt.se.preprocessed.fq -o $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics
# Analysis: seed each job's work dir with pre-computed inputs, then run
# only the analysis step.
an-mgmt:
  stage: analysis
  script:
    - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
    - cp -r /mnt/data/input/analysis-mgmt/* $CI_PROJECT_DIR/$CI_BUILD_ID
    - impy -e IMP_SUDO=sudo -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db analysis --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step

an-mg:
  stage: analysis
  script:
    - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
    - cp -r /mnt/data/input/analysis-mg/* $CI_PROJECT_DIR/$CI_BUILD_ID
    - impy -e IMP_SUDO=sudo -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db analysis --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics

an-mt:
  stage: analysis
  script:
    - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
    - cp -r /mnt/data/input/analysis-mt/* $CI_PROJECT_DIR/$CI_BUILD_ID
    - impy -e IMP_SUDO=sudo -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db analysis --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics
# binny jobs are currently disabled; kept for reference.
# binny-mgmt:
#   stage: binning
#   script:
#     - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
#     - cp -r /mnt/data/input/binny-mgmt/* $CI_PROJECT_DIR/$CI_BUILD_ID
#     - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db binning --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step
# binny-mg:
#   stage: binning
#   script:
#     - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
#     - cp -r /mnt/data/input/binny-mg/* $CI_PROJECT_DIR/$CI_BUILD_ID
#     - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db binning --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics

# MaxBin binning: manual-only, seeded from pre-computed inputs.
maxbin-mgmt:
  stage: manual
  when: manual
  script:
    - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
    - cp -r /mnt/data/input/maxbin-mgmt/* $CI_PROJECT_DIR/$CI_BUILD_ID
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -b maxbin binning --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step

maxbin-mg:
  stage: manual
  when: manual
  script:
    - mkdir -p $CI_PROJECT_DIR/$CI_BUILD_ID
    - cp -r /mnt/data/input/maxbin-mg/* $CI_PROJECT_DIR/$CI_BUILD_ID
    - impy -s . -c $CI_PROJECT_DIR/test/default.conf.json -d /mnt/data/db -b maxbin binning --data-dir $CI_PROJECT_DIR/$CI_BUILD_ID --single-step --single-omics
# v 1.4.1
* fix command line bug (check config file exists)
* fix command line bug (loading container from local tarball)
* handle cases where no SE reads remain after preprocessing
* fix MT "binning" step
* command line handle environment variable
* remove deprecated IMP script (use impy instead)
# v 1.4.0
* Update command line utility: impy
* Integrate more tools into the pipeline (e.g. MaxBin)
* Part of the pipeline can be launched without the others
* Testing pipeline workflow and tools
* Update container to version 1.4
# v 1.2.0
* Upgrade Snakemake to the latest version
* Current version of the IMP code is shipped inside the docker container
# v 1.1.1
* Change megahit version
MIT License
Copyright (c) 2016 Luxembourg Centre for Systems Biomedicine
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
include *.rst
# Upload and release changes of impy to [PyPI](https://pypi.python.org/pypi).
* Login to PyPI.
* Makes changes in `setup.py` (version, ...)
* Upload the package.
```
python setup.py sdist upload
```
Rscript -e "install.packages(c('caTools','fpc','FNN','diptest','mixtools','gclus'),repos='http://cran.us.r-project.org')"
# annotateHMMs_Cancerome.sh
# run HMMs until they are added to the Prokka output
# countsAnno.sh
# count reads per annotation
# countsProduct.sh
# In any case, the /work/projects/ecosystem_biology/cancerome/assemblies directory contains several IMP assemblies with subsequent steps
# but I am not sure whether the IMP version I used for that produces exactly the same output as what we have now
# either way, thanks for your patience and have a nice evening
#
###
IMP
###
The Integrated Meta-omic Pipeline (IMP) is developed to perform large-scale, reproducible and automated integrative reference-free analysis of metagenomic and metatranscriptomic data. IMP also performs single-omic (i.e. metagenomic-only and metatranscriptomic-only) analysis as an additional functionality.
*************************
Documentation and website
*************************
All documentation and resources can be found : `here <http://r3lab.uni.lu/web/imp/doc.html>`_.
All components used to develop the IMP workflow are addressed under the `R3lab frozen pages <http://r3lab.uni.lu/frozen/imp>`_.
# include configuration file
include:
    "rules/ini/config"

# define the data types used and the assembly
# MG/MT come from the configuration included above.
if MG and MT:
    TYPES = ['mg', 'mt']
    ASS = 'mgmt'
elif MG:
    TYPES = ['mg']
    ASS = 'mg'
elif MT:
    TYPES = ['mt']
    ASS = 'mt'

workdir:
    OUTPUTDIR

# include rules for the workflow based on the input parameters
include:
    "rules/data.input.rules"

# INTEGRATIVE MG-MT workflow
if MG and MT:
    if 'preprocessing' in IMP_STEPS:
        include:
            "workflows/integrative/Preprocessing"
    if 'assembly' in IMP_STEPS:
        include:
            "workflows/integrative/Assembly"
    # NOTE(review): analysis/binning/report includes are disabled for the
    # integrative workflow, yet the corresponding *.done targets below are
    # still appended when those steps are configured — confirm intended.
    # if 'analysis' in IMP_STEPS:
    #     include:
    #         "workflows/integrative/Analysis"
    # if 'binning' in IMP_STEPS:
    #     include:
    #         "workflows/integrative/Binning"
    # if 'report' in IMP_STEPS:
    #     include:
    #         "workflows/integrative/Report"
# Single omics MG workflow
elif MG:
    if 'preprocessing' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Preprocessing"
    if 'assembly' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Assembly"
    if 'analysis' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Analysis"
    if 'binning' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Binning"
    if 'report' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Report"
# Single omics MT workflow
elif MT:
    if 'preprocessing' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Preprocessing"
    if 'assembly' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Assembly"
    if 'analysis' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Analysis"
    if 'binning' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Binning2"
    if 'report' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Report"
else:
    raise Exception('No input data.')

# collect the per-step sentinel files requested by the configuration
inputs = []
if 'preprocessing' in IMP_STEPS:
    inputs.append('preprocessing.done')
if 'assembly' in IMP_STEPS:
    inputs.append('assembly.done')
if 'analysis' in IMP_STEPS:
    inputs.append('analysis.done')
if 'binning' in IMP_STEPS:
    inputs.append('binning.done')
if 'report' in IMP_STEPS:
    inputs.append('report.done')

# master command
rule ALL:
    input:
        inputs
    output:
        touch('workflow.done')
# include configuration file
include:
    "rules/ini/config"

# define the data types used and the assembly
# MG/MT come from the configuration included above.
if MG and MT:
    TYPES = ['mg', 'mt']
    ASS = 'mgmt'
elif MG:
    TYPES = ['mg']
    ASS = 'mg'
elif MT:
    TYPES = ['mt']
    ASS = 'mt'

workdir:
    OUTPUTDIR

# include rules for the workflow based on the input parameters
include:
    "rules/data.input.rules"

# INTEGRATIVE MG-MT workflow (AnalysisC / Binning2 variants)
if MG and MT:
    if 'preprocessing' in IMP_STEPS:
        include:
            "workflows/integrative/Preprocessing"
    if 'assembly' in IMP_STEPS:
        include:
            "workflows/integrative/Assembly"
    if 'analysis' in IMP_STEPS:
        include:
            "workflows/integrative/AnalysisC"
    if 'binning' in IMP_STEPS:
        include:
            "workflows/integrative/Binning2"
# Single omics MG workflow
elif MG:
    if 'preprocessing' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Preprocessing"
    if 'assembly' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Assembly"
    if 'analysis' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/AnalysisC"
    if 'binning' in IMP_STEPS:
        include:
            "workflows/single_omics/mg/Binning2"
# Single omics MT workflow
elif MT:
    if 'preprocessing' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Preprocessing"
    if 'assembly' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Assembly"
    if 'analysis' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/AnalysisC"
    if 'binning' in IMP_STEPS:
        include:
            "workflows/single_omics/mt/Binning2"
else:
    raise Exception('No input data.')

# collect the per-step sentinel files requested by the configuration
inputs = []
if 'preprocessing' in IMP_STEPS:
    inputs.append('preprocessing.done')
if 'assembly' in IMP_STEPS:
    inputs.append('assembly.done')
if 'analysis' in IMP_STEPS:
    inputs.append('analysis.done')
if 'binning' in IMP_STEPS:
    inputs.append('binning.done')

# master command
rule ALL:
    input:
        inputs
    output:
        touch('workflow.done')
# include configuration file
include:
"rules/ini/config"
# define the data types used and the assembly
if MG and MT:
TYPES = ['mg', 'mt']
ASS = 'mgmt'
elif MG:
TYPES = ['mg']
ASS = 'mg'
elif MT:
TYPES = ['mt']
ASS = 'mt'
workdir:
OUTPUTDIR
# include rules for the workflow based on the input parameters
include:
"rules/data.input.rules"
# INTEGRATIVE MG-MT workflow
if MG and MT:
if 'preprocessing' in IMP_STEPS:
include:
"workflows/integrative/Preprocessing"
if 'assembly' in IMP_STEPS:
include:
"workflows/integrative/Assembly"
if 'analysis' in IMP_STEPS:
include:
"workflows/integrative/AnalysisC2"
if 'binning' in IMP_STEPS:
include:
"workflows/integrative/Binning"
if 'report' in IMP_STEPS:
include:
"workflows/integrative/Report"
# Single omics MG workflow
elif MG:
if 'preprocessing' in IMP_STEPS:
include:
"workflows/single_omics/mg/Preprocessing"
if 'assembly' in IMP_STEPS:
include:
"workflows/single_omics/mg/Assembly"
if 'analysis' in IMP_STEPS:
include:
"workflows/single_omics/mg/AnalysisC2"
if 'binning' in IMP_STEPS:
include:
"workflows/single_omics/mg/Binning"
if 'report' in IMP_STEPS:
include:
"workflows/single_omics/mg/Report"
elif MT:
if 'preprocessing' in IMP_STEPS:
include:
"workflows/single_omics/mt/Preprocessing"
if 'assembly' in IMP_STEPS:
include:
"workflows/single_omics/mt/Assembly"
if 'analysis' in IMP_STEPS:
include: