Skip to content

Commit

Permalink
Create report and add run data, have proc containers defined in confi…
Browse files Browse the repository at this point in the history
…g so we can put versions in the report
  • Loading branch information
glormph committed Sep 18, 2024
1 parent 076aac3 commit 513b31c
Show file tree
Hide file tree
Showing 11 changed files with 564 additions and 126 deletions.
2 changes: 2 additions & 0 deletions assets/bulma.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion assets/report.html
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ <h4 class="subtitle is-4">
<div class="mt-5 tabs is-boxed">
<ul>
<li id="resultstab" class="is-active"><a>Results</a></li>
<li id="rundatatab"><a>Run data</a></li>
<li id="psmqctab"> <a> PSMs </a> </li>
<li id="featqctab"> <a>Peptides/proteins/genes </a> </li>
{% if ptmtables.summary %}
Expand All @@ -32,6 +31,7 @@ <h4 class="subtitle is-4">
{% if expplots.pca %}
<li id="deqctab"><a>Expression</a> </li>
{% endif %}
<li id="rundatatab"><a>Run data</a></li>
</ul>
</div>
</div>
Expand Down
12 changes: 2 additions & 10 deletions conf/base.config
Original file line number Diff line number Diff line change
@@ -1,13 +1,5 @@
/*
* -------------------------------------------------
* lehtiolab/ddamsproteomics Nextflow base config file
* -------------------------------------------------
* A 'blank slate' config file, appropriate for general
* use on most high performance compute environments.
* Assumes that all software is installed and available
* on the PATH. Runs in `local` mode - all jobs will be
* run on the logged in environment.
*/
singularity.enabled = false
docker.enabled = false

process {

Expand Down
62 changes: 62 additions & 0 deletions containers.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
{
"msstitch": {
"version": "3.16",
"singularity": "https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0",
"docker": "quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0"
},
"python": {
"version": "3.12",
"docker": "python:3.12",
"singularity": "docker://python:3.12"
},
"proteowizard": {
"version": "3.0.24172",
"docker": "proteowizard/pwiz-skyline-i-agree-to-the-vendor-licenses:3.0.24172-63d00b1",
"singularity": "docker://proteowizard/pwiz-skyline-i-agree-to-the-vendor-licenses:3.0.24172-63d00b1"
},
"openms": {
"version": "3.1.0",
"docker": "quay.io/biocontainers/openms:3.1.0--h191ead1_4",
"singularity": "https://depot.galaxyproject.org/singularity/openms:3.1.0--h191ead1_4"
},
"dinosaur": {
"version": "1.0",
"docker": "lehtiolab/ddamsproteomics:2.18",
"singularity": "docker://lehtiolab/ddamsproteomics:2.18"
},
"hardklor": {
"version": "2.3.2",
"docker": "quay.io/biocontainers/hardklor:2.3.2--he1b5a44_0",
"singularity": "https://depot.galaxyproject.org/singularity/hardklor:2.3.2--he1b5a44_0"
},
"kronik": {
"version": "2.20",
"docker": "quay.io/biocontainers/kronik:2.20--h4ac6f70_6",
"singularity": "https://depot.galaxyproject.org/singularity/kronik:2.20--h4ac6f70_6"
},
"sqlite": {
"version": "3.33.0",
"docker": "quay.io/biocontainers/sqlite:3.33.0",
"singularity": "https://depot.galaxyproject.org/singularity/sqlite:3.33.0"
},
"deqms": {
"version": "1.18.0",
"docker": "lehtiolab/deqms",
"singularity": "docker://lehtiolab/deqms"
},
"msgfplus": {
"version": "2023.01.1202",
"docker": "quay.io/biocontainers/msgf_plus:2023.01.1202--hdfd78af_0",
"singularity": "https://depot.galaxyproject.org/singularity/msgf_plus:2023.01.1202--hdfd78af_0"
},
"percolator": {
"version": "3.5",
"docker": "quay.io/biocontainers/percolator:3.5--hfd1433f_1",
"singularity": "https://depot.galaxyproject.org/singularity/percolator:3.5--hfd1433f_1"
},
"luciphor2": {
"version": "2020_04_03",
"docker": "quay.io/biocontainers/luciphor2:2020_04_03--hdfd78af_1",
"singularity": "https://depot.galaxyproject.org/singularity/luciphor2:2020_04_03--hdfd78af_1"
}
}
137 changes: 90 additions & 47 deletions main.nf
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
#!/usr/bin/env nextflow

include { msgf_info_map; listify; stripchars_infile; get_regex_specialchars } from './modules.nf'
import groovy.json.JsonSlurper
include { paramsSummaryMap } from 'plugin/nf-validation'

include { msgf_info_map; listify; stripchars_infile; get_regex_specialchars; read_header } from './modules.nf'
include { MSGFPERCO } from './workflows/msgf_perco.nf'
include { PTMANALYSIS } from './workflows/ptms.nf'
include { MATCH_SEQUENCES } from './workflows/match_sequences.nf'
Expand Down Expand Up @@ -80,9 +83,7 @@ process get_software_versions {


process createTargetDecoyFasta {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.15--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.15--pyhdfd78af_0'}"
label 'msstitch'

input:
path(tdb)
Expand Down Expand Up @@ -125,7 +126,6 @@ def plate_or_no(it, length) {


process centroidMS1 {
container 'chambm/pwiz-skyline-i-agree-to-the-vendor-licenses:3.0.20066-729ef9c41'

input:
tuple val(setname), path(infile), val(instr)
Expand All @@ -144,9 +144,6 @@ process centroidMS1 {


process isobaricQuant {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/openms:2.9.1--h135471a_1' :
'quay.io/biocontainers/openms:2.9.1--h135471a_1'}"

input:
tuple val(setname), val(parsed_infile), path(infile), val(instr), val(isobtype)
Expand Down Expand Up @@ -192,9 +189,6 @@ container 'lehtiolab/ddamsproteomics:2.18'


process hardklor {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/hardklor:2.3.2--he1b5a44_0' :
'quay.io/biocontainers/hardklor:2.3.2--he1b5a44_0'}"

input:
tuple val(sample), path(infile), path(hkconf)
Expand All @@ -213,9 +207,7 @@ process hardklor {


process kronik {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/kronik:2.20--h4ac6f70_6' :
'quay.io/biocontainers/kronik:2.20--h4ac6f70_6'}"

input:
tuple val(sample), val(parsed_infile), path('hardklor.out')

Expand All @@ -229,12 +221,10 @@ process kronik {
}



process PTMClean {
label 'sqlite'
// FIXME can be in PTM wf?
// In PTMS we need to delete all PSMs since we will rebuild it
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/sqlite:3.33.0' : 'quay.io/biocontainers/sqlite:3.33.0'}"

input:
path('db.sqlite')
Expand Down Expand Up @@ -268,6 +258,7 @@ process PTMClean {


process complementSpectraLookupCleanPSMs {
label 'msstitch'

input:
tuple val(in_setnames), path(mzmlfiles), val(platenames), path(tlup), path(dlup), path(tpsms), path(dpsms), path(ptmpsms)
Expand All @@ -284,7 +275,7 @@ process complementSpectraLookupCleanPSMs {
# If this is an addition to an old lookup, copy it and extract set names
cp ${tlup} target_db.sqlite
cp ${dlup} decoy_db.sqlite
sqlite3 target_db.sqlite "SELECT set_name FROM biosets" > old_setnames
python3 -c "exec(\\"import sqlite3;c=sqlite3.Connection('${tlup}');x=c.execute('SELECT set_name FROM biosets').fetchall();print('\\\\\\n'.join([y[0] for y in x]))\\")" > old_setnames
# If adding to old lookup: grep new setnames in old and run msstitch deletesets if they match
# use -x for grep since old_setnames must grep whole word
if grep -xf old_setnames <(echo ${setnames.join('\n')} )
Expand All @@ -309,9 +300,8 @@ process complementSpectraLookupCleanPSMs {


process createNewSpectraLookup {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.15--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.15--pyhdfd78af_0'}"

label 'msstitch'

input:
tuple val(setnames), file(mzmlfiles), val(platenames)
Expand All @@ -329,11 +319,8 @@ process createNewSpectraLookup {


process quantLookup {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.15--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.15--pyhdfd78af_0'}"

// FIXME publishDir "${params.outdir}", mode: 'copy', overwrite: true, saveAs: {it == 'target.sqlite' ? 'quant_lookup.sql' : null }

label 'msstitch'

input:
tuple val(mzmlnames), path(isofns), path(ms1fns), path(tlookup)
Expand All @@ -353,9 +340,8 @@ process quantLookup {


process createPSMTable {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0'}"

label 'msstitch'

input:
tuple val(td), path(psms), path('lookup'), path(tdb), path(ddb), path('oldpsms'), val(complementary_run), val(do_ms1), val(do_isobaric), val(onlypeptides)
Expand Down Expand Up @@ -389,7 +375,8 @@ process createPSMTable {


process peptidePiAnnotation {
container "python:3.12"

label 'python'

input:
tuple path('psms'), val(strips), path('hirief_training_pep')
Expand All @@ -407,9 +394,8 @@ process peptidePiAnnotation {


process splitPSMs {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0'}"

label 'msstitch'

input:
tuple val(td), path('psms'), val(setnames)
Expand All @@ -425,9 +411,8 @@ process splitPSMs {


process splitTotalProteomePSMs {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0'}"

label 'msstitch'

input:
tuple path('tppsms_in'), val(setnames)
Expand All @@ -443,9 +428,8 @@ process splitTotalProteomePSMs {


process makePeptides {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0'}"

label 'msstitch'

input:
tuple val(td), val(setname), path('psms'), val(setisobaric), val(denoms), val(keepnapsms_quant), val(normalize_isob), val(do_ms1)
Expand Down Expand Up @@ -473,9 +457,8 @@ process makePeptides {


process proteinGeneSymbolTableFDR {
container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0'}"

label 'msstitch'

input:
tuple val(setname), path('tpeptides'), path('tpsms'), path('dpeptides'), path(tfasta), path(dfasta), val(acctype), val(do_ms1), val(isobaric), val(denom), val(keepnapsms_quant), val(normalize)
Expand Down Expand Up @@ -517,6 +500,10 @@ process proteinGeneSymbolTableFDR {


process sampleTableCheckClean {

// Runs no python but that container has the tools needed
label 'python'

input:
tuple path('sampletable'), val(do_deqms)

Expand All @@ -541,9 +528,7 @@ process sampleTableCheckClean {

process proteinPeptideSetMerge {

container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/msstitch:3.16--pyhdfd78af_0' :
'quay.io/biocontainers/msstitch:3.16--pyhdfd78af_0'}"
label 'msstitch'

input:
tuple val(setnames), val(acctype), path(tables), path(lookup), path(sampletable_with_special_chars), path('sampletable_no_special_chars'), val(do_isobaric), val(do_ms1), val(proteinconflvl), val(do_pgroup), val(do_deqms)
Expand Down Expand Up @@ -581,8 +566,6 @@ process proteinPeptideSetMerge {


process DEqMS {
container 'lehtiolab/deqms'

input:
tuple val(acctype), path('grouptable'), path('sampletable')

Expand All @@ -601,6 +584,9 @@ process DEqMS {
}


def mzml_list = []
def header = []

workflow {

// Validate and set file inputs
Expand Down Expand Up @@ -1145,3 +1131,60 @@ if (!params.quantlookup) {
.subscribe { it.copyTo("${params.outdir}/${it.baseName}.${it.extension}") }
}


// Completion handler: on success, assemble a standalone HTML report in params.outdir
// from a Groovy StreamingTemplateEngine template, embedding JS libs, software
// versions (derived from containers.json + the execution trace), run parameters,
// and the input-file listing. Cleans up the template and libs.js afterwards.
workflow.onComplete {
  if (workflow.success) {
    // JS assets to inline into the report; libs.js is deleted after embedding.
    def libfile = file("${params.outdir}/libs.js")
    def libs = libfile.readLines()
    def bulma = file("${baseDir}/assets/bulma.js").readLines()
    // Summary map from the nf-validation plugin; also used below to resolve
    // per-process container settings via psmap['Core Nextflow options']['container'].
    def psmap = paramsSummaryMap(workflow)
    // Header columns of the input sheet (read_header is imported from modules.nf).
    // First column is renamed for display; 'mzmlfile' is dropped from the header.
    // NOTE(review): assumes the raw path column is not wanted in the report — confirm.
    def files_header = read_header(params.input)
    files_header[0] = 'filename'
    files_header.remove('mzmlfile')
    // Build rows for the report's input-file table, one per entry in mzml_list
    // (mzml_list is a module-level list, presumably filled during the workflow run
    // — its population is not visible here). Header row is prepended at index 0.
    infiles = mzml_list.collect { fn -> files_header.collect { fn[it] }}
    infiles.add(0, files_header)

    // Parse containers file to get software versions.
    // Maps container URI (for the active engine) -> [tool name, version].
    def jsonslurp = new JsonSlurper()
    def containers_versions = jsonslurp.parseText(new File("${baseDir}/containers.json").text)
      .collectEntries { k, v -> [v[workflow.containerEngine], [k, v.version]] }

    // Get processes used from trace to output the software versions used in pipeline.
    // First take containers assigned via 'withLabel:' selectors in the config ...
    def label_containers = psmap['Core Nextflow options']['container']
      .findAll { it.key.contains('withLabel:') }
      .collect { it.value }
      .collect { [containers_versions[it][0], containers_versions[it][1], it] }
    // ... then add containers of processes that actually ran, read from the
    // execution trace (column 3 = process name; strip workflow prefix and the
    // trailing " (N)" task counter), resolved back through the config map.
    def sw_versions = label_containers + file("${params.outdir}/pipeline_info/execution_trace.txt").readLines()[1..-1]
      .collect { it.tokenize('\t')[3]
        .replaceFirst(~/^.+:/, '')
        .replaceFirst(~/\s\([0-9]+\)$/, '')
      }.unique()
      .collect { psmap['Core Nextflow options']['container'][it] }
      .findAll { it } // remove null in case process is not defined in container-config
      .unique()
      .collect { [containers_versions[it][0], containers_versions[it][1], it] }

    // Set name
    // if not wf.runName (-name or auto) is like "crazy_euler" or other "{adjective}_{scientist}"
    // i.e. keep an explicitly passed -name; ignore Nextflow's auto-generated ones.
    if (!params.name && !(workflow.runName ==~ /[a-z]+_[a-z]+/) ) {
      runname = workflow.runName
    } else if (!params.name) {
      runname = 'untitled'
    } else {
      runname = params.name
    }
    // Bindings handed to the HTML template.
    def fields = [runname: runname,
      sw_versions: sw_versions,
      params: psmap['Other parameters'],
      infiles: infiles,
      libs: libs, bulma: bulma]
    // Render the template written earlier to the output dir, then remove the
    // intermediate template and libs.js so only report.html remains.
    def rf = new File("${params.outdir}/report_groovy_template.html")
    def temp_engine = new groovy.text.StreamingTemplateEngine()
    def report_template = temp_engine.createTemplate(rf).make(fields)
    def report_html = report_template.toString()
    def output_rf = new File( params.outdir, "report.html" )
    output_rf.withWriter { w -> w << report_html }
    rf.delete()
    libfile.delete()
  }
}
Loading

0 comments on commit 513b31c

Please sign in to comment.