Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Updates to bring plugin in-line w/mg-scripts #93

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/qiita-plugin-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ jobs:
# seqtk is a conda-installed requirement for metapool and isn't
# installed automatically by its setup.py.
conda config --add channels bioconda
conda create -q --yes -n klp python=3.9 seqtk
conda create -q --yes -n klp python=3.9 'seqtk>=1.4'
conda activate klp

export QIITA_SERVER_CERT=`pwd`/qiita-dev/qiita_core/support_files/server.crt
Expand Down
9 changes: 5 additions & 4 deletions qp_klp/Step.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,12 +327,13 @@ def _quality_control(self, config, input_file_path):
self.master_qiita_job_id,
config['job_max_array_length'],
config['known_adapters_path'],
config['movi_executable_path'],
config['gres_value'],
config['pmls_path'],
config['additional_fastq_tags'],
bucket_size=config['bucket_size'],
length_limit=config['length_limit'],
cores_per_task=config['cores_per_task'],
movi_path=config['movi_executable_path'],
gres_value=config['gres_value'],
pmls_path=config['pmls_path'])
cores_per_task=config['cores_per_task'])

nuqc_job.run(callback=self.update_callback)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
"cores_per_task": 4,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 4,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
"cores_per_task": 2,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 4,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
"cores_per_task": 2,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 2,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
"cores_per_task": 2,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 4,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
"cores_per_task": 4,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 4,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
"cores_per_task": 4,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 4,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@
"cores_per_task": 4,
"movi_executable_path": "/home/user/user_dir/Movi/build/movi-default",
"gres_value": 4,
"additional_fastq_tags": ["BX"],
"pmls_path": "/home/user/user_dir/human_host_filtration/scripts/qiita_filter_pmls.py"
},
"seqpro": {
Expand Down
4 changes: 2 additions & 2 deletions qp_klp/tests/data/process_all_fastq_files.sh
Original file line number Diff line number Diff line change
Expand Up @@ -118,8 +118,8 @@ function mux-runner () {

# minimap/samtools pair commands are now generated in NuQCJob._generate_mmi_filter_cmds()
# and passed to this template.
minimap2 -2 -ax sr -t 1 /databases/minimap2/db_1.mmi ${jobd}/seqs.interleaved.fastq -a | samtools fastq -@ 1 -f 12 -F 256 > ${jobd}/foo
minimap2 -2 -ax sr -t 1 /databases/minimap2/db_2.mmi ${jobd}/foo -a | samtools fastq -@ 1 -f 12 -F 256 > ${jobd}/bar
minimap2 -2 -ax sr -y -t 1 /databases/minimap2/db_1.mmi ${jobd}/seqs.interleaved.fastq -a | samtools fastq -@ 1 -f 12 -F 256 -T BX > ${jobd}/foo
minimap2 -2 -ax sr -y -t 1 /databases/minimap2/db_2.mmi ${jobd}/foo -a | samtools fastq -@ 1 -f 12 -F 256 -T BX > ${jobd}/bar
mv ${jobd}/bar ${jobd}/seqs.interleaved.filter_alignment.fastq
[ -e ${jobd}/foo ] && rm ${jobd}/foo
[ -e ${jobd}/bar ] && rm ${jobd}/bar
Expand Down
13 changes: 7 additions & 6 deletions qp_klp/tests/test_metagenomic_step.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,20 +142,21 @@ def test_metagenomic_quality_control(self):
'.trimmed.fastq.gz'))
copyfile(fp, file_path)

# Since the 'sbatch' and 'sacct' commands don't exist in the testing
# Since the 'sbatch' and 'squeue' commands don't exist in the testing
# environment, fake them by creating fakes that will output to stdout
# the metadata needed to keep run() working as intended. These faked
# binary files overwrite the versions created by test_step.py and are
# automatically deleted after testing. test_step.py sets chmod for us.
with open(join(dirname(sys.executable), 'sbatch'), 'w') as f:
# code will extract 777 for use as the fake job-id in slurm.
f.write("echo Hello 777")
# code will extract 9999999 for use as the fake job-id in slurm.
f.write("echo 'Submitted batch job 9999999'")

with open(join(dirname(sys.executable), 'sacct'), 'w') as f:
# fake sacct will return job-id 777 completed successfully.
with open(join(dirname(sys.executable), 'squeue'), 'w') as f:
# the fake squeue will report that job-id 9999999 completed
# successfully. Faked output files created in the test method will
# generate faked results.
f.write("echo \"777|cc_fake_job.sh|COMPLETED|00:10:00|0:0\"")
f.write("echo \"ARRAY_JOB_ID,JOBID,STATE\n9999999,9999999,"
"COMPLETED\"")

# execute the quality_control() method, which will in turn call NuQC's
# run() method.
Expand Down
6 changes: 2 additions & 4 deletions qp_klp/tests/test_step.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,10 +336,8 @@ def _create_test_input(self, stage):
self._create_fake_bin('sbatch', "echo 'Submitted "
"batch job 9999999'")

self._create_fake_bin('sacct', "echo '9999999|99999999-9999-9999"
"-9999-999999999999.txt|COMPLETED"
"|09:53:41|0:0'")

self._create_fake_bin('squeue', "echo 'ARRAY_JOB_ID,JOBID,STATE\n"
"9999999,9999999,COMPLETED'")
if stage >= 2:
# generate dummy fastq files in ConvertJob and create an empty
# NuQCJob directory to use for testing NuQCJob initialization.
Expand Down
Loading