
Commit fc59781

Author: Pablo Riesgo Ferreiro

Merge branch 'develop' into 'master'

Release v1.3.0

See merge request tron/tron-bam-preprocessing!12

Parents: 1441199, d779eb2

4 files changed: +14 −14 lines

Makefile

Lines changed: 5 additions & 5 deletions
@@ -9,10 +9,10 @@ clean:
 
 test:
 	nextflow main.nf -profile test,conda --output output/test1
-	#nextflow main.nf -profile test,conda --skip_bqsr --output output/test2
-	#nextflow main.nf -profile test,conda --skip_realignment --output output/test3
-	#nextflow main.nf -profile test,conda --skip_deduplication --output output/test4
-	#nextflow main.nf -profile test,conda --output output/test5 --skip_metrics
+	nextflow main.nf -profile test,conda --skip_bqsr --output output/test2
+	nextflow main.nf -profile test,conda --skip_realignment --output output/test3
+	nextflow main.nf -profile test,conda --skip_deduplication --output output/test4
+	nextflow main.nf -profile test,conda --output output/test5 --skip_metrics
 	nextflow main.nf -profile test,conda --output output/test6 --intervals false
 	nextflow main.nf -profile test,conda --output output/test7 --hs_metrics_target_coverage target_coverage.txt --hs_metrics_per_base_coverage per_base_coverage.txt
-	#nextflow main.nf -profile test,conda --output output/test8 --hs_metrics_target_coverage target_coverage.txt --hs_metrics_per_base_coverage per_base_coverage.txt --collect_hs_metrics_min_base_quality 10 --collect_hs_metrics_min_mapping_quality 10
+	nextflow main.nf -profile test,conda --output output/test8 --hs_metrics_target_coverage target_coverage.txt --hs_metrics_per_base_coverage per_base_coverage.txt --collect_hs_metrics_min_base_quality 10 --collect_hs_metrics_min_mapping_quality 10

environment.yml

Lines changed: 1 addition & 2 deletions
@@ -1,11 +1,10 @@
 # You can use this file to create a conda environment for this pipeline:
 # conda env create -f environment.yml
-name: tronflow-bam-preprocessing-1.2.1
+name: tronflow-bam-preprocessing-1.3.0
 channels:
   - conda-forge
   - bioconda
   - defaults
 dependencies:
-  - openjdk=8.0.282
   - bioconda::gatk4=4.2.0.0
   - bioconda::gatk=3.8

main.nf

Lines changed: 6 additions & 5 deletions
@@ -82,7 +82,7 @@ process prepareBam {
     set val(name),
       val("${bam.baseName}"),
       val(type), file("${bam.baseName}.prepared.bam"),
-      file("${bam.baseName}.prepared.bai") into prepared_bams, prepared_bams_for_metrics, prepared_bams_for_hs_metrics
+      file("${bam.baseName}.prepared.bai") into prepared_bams
 
     """
     mkdir tmp
@@ -128,7 +128,8 @@ if (!params.skip_deduplication) {
 
     output:
     set val(name), val(bam_name), val(type),
-      file("${bam.baseName}.dedup.bam"), file("${bam.baseName}.dedup.bam.bai") into deduplicated_bams
+      file("${bam.baseName}.dedup.bam"), file("${bam.baseName}.dedup.bam.bai") into deduplicated_bams,
+      deduplicated_bams_for_metrics, deduplicated_bams_for_hs_metrics
     file("${bam.baseName}.dedup_metrics") optional true into deduplication_metrics
 
     script:
@@ -146,7 +147,7 @@ if (!params.skip_deduplication) {
     }
 }
 else {
-    deduplicated_bams = prepared_bams
+    prepared_bams.into{ deduplicated_bams; deduplicated_bams_for_metrics; deduplicated_bams_for_hs_metrics}
 }
 
 if (! params.skip_metrics) {
@@ -160,7 +161,7 @@ if (! params.skip_metrics) {
     publishDir "${publish_dir}/${name}/metrics", mode: "copy"
 
     input:
-    set name, bam_name, type, file(bam), file(bai) from prepared_bams_for_hs_metrics
+    set name, bam_name, type, file(bam), file(bai) from deduplicated_bams_for_hs_metrics
 
     output:
     file("*_metrics") optional true into txt_hs_metrics
@@ -200,7 +201,7 @@ if (! params.skip_metrics) {
     publishDir "${publish_dir}/${name}/metrics", mode: "copy"
 
     input:
-    set name, bam_name, type, file(bam), file(bai) from prepared_bams_for_metrics
+    set name, bam_name, type, file(bam), file(bai) from deduplicated_bams_for_metrics
 
     output:
     file("*_metrics") optional true into txt_metrics

nextflow.config

Lines changed: 2 additions & 2 deletions
@@ -55,12 +55,12 @@ dag {
     //file = "${params.output}/pipeline_dag.svg"
 }
 
-VERSION = '1.2.1'
+VERSION = '1.3.0'
 DOI = 'https://zenodo.org/badge/latestdoi/358400957'
 
 manifest {
   name = 'TRON-Bioinformatics/tronflow-bam-preprocessing'
-  author = 'Pablo Riesgo Ferreiro'
+  author = 'Pablo Riesgo Ferreiro, Özlem Muslu'
   homePage = 'https://github.com/TRON-Bioinformatics/tronflow-bam-preprocessing'
   description = 'Picard and GATK BAM preprocessing pipeline'
   mainScript = 'main.nf'

0 commit comments