Merge pull request #316 from biowdl/fastp
Add fastp and picard CollectInsertSizeMetrics
DavyCats authored Feb 17, 2023
2 parents 07c44ca + 19652d8 commit 9e946c7
Showing 4 changed files with 168 additions and 4 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -9,6 +9,8 @@ that users understand how the changes affect the new version.
-->
version 5.1.0-dev
---------------------------
+ Add a task for fastp.
+ Add a task for picard CollectInsertSizeMetrics.
+ Increased the timeMinutes runtime attribute for manta (somatic and germline) to `2880`.
+ Add a task for GRIDSS somatic filtering.
+ Add a task to generate a panel of normals BED and BEDPE file for GRIDSS.
110 changes: 110 additions & 0 deletions fastp.wdl
@@ -0,0 +1,110 @@
version 1.0

# MIT License
#
# Copyright (c) 2022 Leiden University Medical Center
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
task Fastp {
input {
File read1
File read2
String outputPathR1
String outputPathR2
String htmlPath
String jsonPath

Int compressionLevel = 1
Boolean correction = false
Int lengthRequired = 15
Int? split
Boolean performAdapterTrimming = true

Int threads = 4
String memory = "50GiB"
Int timeMinutes = 1 + ceil(size([read1, read2], "G") * 6.0 / threads)
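# For example, two 10 GB fastq files with threads = 4 gives 1 + ceil(20 * 6.0 / 4) = 31 minutes.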
String dockerImage = "quay.io/biocontainers/fastp:0.23.2--h5f740d0_3"

Int? noneInt
}

String outputDirR1 = sub(outputPathR1, basename(outputPathR1), "")
String outputDirR2 = sub(outputPathR2, basename(outputPathR2), "")

Int? effectiveSplit = if select_first([split, 1]) > 1 then split else noneInt
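# effectiveSplit is only defined when split > 1; when it is undefined, the optional --split and -d flags below expand to empty strings and the single (unchunked) output paths are used.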

command <<<
set -e
mkdir -p $(dirname ~{outputPathR1})
mkdir -p $(dirname ~{outputPathR2})
mkdir -p $(dirname ~{htmlPath})
mkdir -p $(dirname ~{jsonPath})

# predict output paths
seq 1 ~{if defined(effectiveSplit) then effectiveSplit else "2"} | awk '{print "~{outputDirR1}/"$0".~{basename(outputPathR1)}"}' > r1_paths
seq 1 ~{if defined(effectiveSplit) then effectiveSplit else "2"} | awk '{print "~{outputDirR2}/"$0".~{basename(outputPathR2)}"}' > r2_paths
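# -d 0 (--split_prefix_digits 0) disables zero-padding of the chunk number prefix, so the paths predicted above match fastp's actual output names when splitting.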
fastp \
-i ~{read1} \
~{"-I " + read2} \
-o ~{outputPathR1} \
~{"-O " + outputPathR2} \
-h ~{htmlPath} \
-j ~{jsonPath} \
-z ~{compressionLevel} \
~{if correction then "--correction" else ""} \
--length_required ~{lengthRequired} \
--thread ~{select_first([effectiveSplit, threads])} \
~{"--split " + effectiveSplit} \
~{if defined(effectiveSplit) then "-d 0" else ""} \
~{if performAdapterTrimming then "" else "--disable_adapter_trimming"}
>>>

output {
File htmlReport = htmlPath
File jsonReport = jsonPath
Array[File] clippedR1 = if defined(effectiveSplit) then read_lines("r1_paths") else [outputPathR1]
Array[File] clippedR2 = if defined(effectiveSplit) then read_lines("r2_paths") else [outputPathR2]
}

runtime {
cpu: select_first([effectiveSplit, threads])
memory: memory
time_minutes: timeMinutes
docker: dockerImage
}

parameter_meta {
read1: {description: "The R1 fastq file.", category: "required"}
read2: {description: "The R2 fastq file.", category: "required"}
outputPathR1: {description: "The output path for the R1 file.", category: "required"}
outputPathR2: {description: "The output path for the R2 file.", category: "required"}
htmlPath: {description: "The path to write the html report to.", category: "required"}
jsonPath: {description: "The path to write the json report to.", category: "required"}
compressionLevel: {description: "The compression level to use for the output.", category: "advanced"}
correction: {description: "Whether or not to apply overlap-based correction.", category: "advanced"}
lengthRequired: {description: "The minimum read length.", category: "advanced"}
split: {description: "The number of chunks to split the files into. The number of threads will be set equal to the number of splits.", category: "common"}
performAdapterTrimming: {description: "Whether adapter trimming should be performed or not.", category: "advanced"}
threads: {description: "The number of threads to use. Only used if the split input is not set.", category: "advanced"}
memory: {description: "The amount of memory this job will use.", category: "advanced"}
timeMinutes: {description: "The maximum amount of time the job will run in minutes.", category: "advanced"}
dockerImage: {description: "The docker image used for this task. Changing this may result in errors which the developers may choose not to address.", category: "advanced"}
}
}
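
For context, a minimal sketch of how the new Fastp task might be called from a workflow. This is not part of the commit; the workflow name, file paths, and the split value are illustrative assumptions.

version 1.0

import "fastp.wdl" as fastp

workflow FastpExample {
    input {
        File r1
        File r2
    }

    # Split into 4 chunks; the task will then also use 4 threads and emit 4 files per read.
    call fastp.Fastp as fastpTask {
        input:
            read1 = r1,
            read2 = r2,
            outputPathR1 = "trimmed/sample_R1.fastq.gz",
            outputPathR2 = "trimmed/sample_R2.fastq.gz",
            htmlPath = "reports/sample_fastp.html",
            jsonPath = "reports/sample_fastp.json",
            split = 4
    }

    output {
        Array[File] trimmedR1 = fastpTask.clippedR1
        Array[File] trimmedR2 = fastpTask.clippedR2
        File fastpJson = fastpTask.jsonReport
    }
}

Leaving split unset would instead run fastp with the default 4 threads and produce a single pair of output files.
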
8 changes: 4 additions & 4 deletions hmftools.wdl
@@ -35,9 +35,9 @@ task Amber {
File referenceFastaDict

Int threads = 2
String memory = "70GiB"
String javaXmx = "64G"
Int timeMinutes = 240
String memory = "85GiB"
String javaXmx = "80G"
Int timeMinutes = 480
String dockerImage = "quay.io/biocontainers/hmftools-amber:3.5--0"
}

@@ -604,7 +604,7 @@ task Linx {
File transExonDataCsv
File transSpliceDataCsv

String memory = "9iB"
String memory = "9GiB"
String javaXmx = "8G"
Int timeMinutes = 10
String dockerImage = "quay.io/biocontainers/hmftools-linx:1.18--hdfd78af_0"
52 changes: 52 additions & 0 deletions picard.wdl
@@ -136,6 +136,58 @@ task CollectHsMetrics {
}
}

task CollectInsertSizeMetrics {
input {
File inputBam
File inputBamIndex

Float? minimumPercentage
String basename = "./insertSize_metrics"

String memory = "5GiB"
String javaXmx = "4G"
Int timeMinutes = 1 + ceil(size(inputBam, "GiB") * 6)
String dockerImage = "quay.io/biocontainers/picard:2.23.2--0"
}

command {
set -e
mkdir -p "$(dirname ~{basename})"
picard -Xmx~{javaXmx} -XX:ParallelGCThreads=1 \
CollectInsertSizeMetrics \
I=~{inputBam} \
O=~{basename}.txt \
H=~{basename}.pdf \
~{"M=" + minimumPercentage}
}

output {
File metricsTxt = "~{basename}.txt"
File metricsPdf = "~{basename}.pdf"
}

runtime {
docker: dockerImage
time_minutes: timeMinutes
memory: memory
}

parameter_meta {
# inputs
inputBam: {description: "The input BAM file for which metrics will be collected.", category: "required"}
inputBamIndex: {description: "The index of the input BAM file.", category: "required"}
minimumPercentage: {description: "Equivalent to picard CollectInsertSizeMetrics' `M` option.", category: "advanced"}
basename: {description: "The basename for the output files.", category: "common"}

memory: {description: "The amount of memory this job will use.", category: "advanced"}
javaXmx: {description: "The maximum memory available to the program. Should be lower than `memory` to accommodate JVM overhead.",
category: "advanced"}
timeMinutes: {description: "The maximum amount of time the job will run in minutes.", category: "advanced"}
dockerImage: {description: "The docker image used for this task. Changing this may result in errors which the developers may choose not to address.",
category: "advanced"}
}
}
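
A similar sketch for calling the new CollectInsertSizeMetrics task; again, the workflow name, the basename value, and the input names are illustrative assumptions rather than part of this diff.

version 1.0

import "picard.wdl" as picard

workflow InsertSizeExample {
    input {
        File bam
        File bamIndex
    }

    call picard.CollectInsertSizeMetrics as insertSizeMetrics {
        input:
            inputBam = bam,
            inputBamIndex = bamIndex,
            basename = "metrics/sample_insertSize"
    }

    output {
        File insertSizeTxt = insertSizeMetrics.metricsTxt
        File insertSizePdf = insertSizeMetrics.metricsPdf
    }
}

The task creates the metrics/ directory itself, since its command runs mkdir -p on the dirname of basename.
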

task CollectMultipleMetrics {
input {
File inputBam
