 from .job import CommandLineJob
 from .stdfsaccess import StdFsAccess

+from schema_salad.sourceline import SourceLine, indent
+
 ACCEPTLIST_EN_STRICT_RE = re.compile(r"^[a-zA-Z0-9._+-]+$")
 ACCEPTLIST_EN_RELAXED_RE = re.compile(r"^[ a-zA-Z0-9._+-]+$")  # with spaces
 ACCEPTLIST_RE = ACCEPTLIST_EN_STRICT_RE
@@ -159,11 +161,7 @@ def makeJobRunner(self): # type: () -> CommandLineJob
     def makePathMapper(self, reffiles, stagedir, **kwargs):
         # type: (List[Any], Text, **Any) -> PathMapper
         dockerReq, _ = self.get_requirement("DockerRequirement")
-        try:
-            return PathMapper(reffiles, kwargs["basedir"], stagedir)
-        except OSError as e:
-            if e.errno == errno.ENOENT:
-                raise WorkflowException(u"Missing input file %s" % e)
+        return PathMapper(reffiles, kwargs["basedir"], stagedir)

     def job(self, joborder, output_callback, **kwargs):
         # type: (Dict[Text, Text], Callable[..., Any], **Any) -> Generator[Union[CommandLineJob, CallbackJob], None, None]
@@ -286,18 +284,21 @@ def rm_pending_output_callback(output_callback, jobcachepending,
         adjustDirObjs(builder.bindings, _check_adjust)

         if self.tool.get("stdin"):
-            j.stdin = builder.do_eval(self.tool["stdin"])
-            reffiles.append({"class": "File", "path": j.stdin})
+            with SourceLine(self.tool, "stdin", validate.ValidationException):
+                j.stdin = builder.do_eval(self.tool["stdin"])
+                reffiles.append({"class": "File", "path": j.stdin})

         if self.tool.get("stderr"):
-            j.stderr = builder.do_eval(self.tool["stderr"])
-            if os.path.isabs(j.stderr) or ".." in j.stderr:
-                raise validate.ValidationException("stderr must be a relative path")
+            with SourceLine(self.tool, "stderr", validate.ValidationException):
+                j.stderr = builder.do_eval(self.tool["stderr"])
+                if os.path.isabs(j.stderr) or ".." in j.stderr:
+                    raise validate.ValidationException("stderr must be a relative path, got '%s'" % j.stderr)

         if self.tool.get("stdout"):
-            j.stdout = builder.do_eval(self.tool["stdout"])
-            if os.path.isabs(j.stdout) or ".." in j.stdout or not j.stdout:
-                raise validate.ValidationException("stdout must be a relative path")
+            with SourceLine(self.tool, "stdout", validate.ValidationException):
+                j.stdout = builder.do_eval(self.tool["stdout"])
+                if os.path.isabs(j.stdout) or ".." in j.stdout or not j.stdout:
+                    raise validate.ValidationException("stdout must be a relative path, got '%s'" % j.stdout)

         if _logger.isEnabledFor(logging.DEBUG):
             _logger.debug(u"[job %s] command line bindings is %s", j.name, json.dumps(builder.bindings, indent=4))
@@ -389,17 +390,18 @@ def collect_output_ports(self, ports, builder, outdir, compute_checksum=True):
                 if _logger.isEnabledFor(logging.DEBUG):
                     _logger.debug(u"Raw output from %s: %s", custom_output, json.dumps(ret, indent=4))
             else:
-                for port in ports:
-                    fragment = shortname(port["id"])
-                    try:
-                        ret[fragment] = self.collect_output(port, builder, outdir, fs_access, compute_checksum=compute_checksum)
-                    except Exception as e:
-                        _logger.debug(
-                            u"Error collecting output for parameter '%s'"
-                            % shortname(port["id"]), exc_info=True)
-                        raise WorkflowException(
-                            u"Error collecting output for parameter '%s': %s"
-                            % (shortname(port["id"]), e))
+                for i, port in enumerate(ports):
+                    with SourceLine(ports, i, WorkflowException):
+                        fragment = shortname(port["id"])
+                        try:
+                            ret[fragment] = self.collect_output(port, builder, outdir, fs_access, compute_checksum=compute_checksum)
+                        except Exception as e:
+                            _logger.debug(
+                                u"Error collecting output for parameter '%s'"
+                                % shortname(port["id"]), exc_info=True)
+                            raise WorkflowException(
+                                u"Error collecting output for parameter '%s':\n %s"
+                                % (shortname(port["id"]), indent(unicode(e))))

             if ret:
                 adjustFileObjs(ret,
@@ -427,24 +429,25 @@ def collect_output(self, schema, builder, outdir, fs_access, compute_checksum=Tr
             revmap = partial(revmap_file, builder, outdir)

             if "glob" in binding:
-                for gb in aslist(binding["glob"]):
-                    gb = builder.do_eval(gb)
-                    if gb:
-                        globpatterns.extend(aslist(gb))
-
-                for gb in globpatterns:
-                    if gb.startswith(outdir):
-                        gb = gb[len(outdir)+1:]
-                    elif gb == ".":
-                        gb = outdir
-                    elif gb.startswith("/"):
-                        raise WorkflowException("glob patterns must not start with '/'")
-                    try:
-                        r.extend([{"location": g,
-                                   "class": "File" if fs_access.isfile(g) else "Directory"}
-                                  for g in fs_access.glob(fs_access.join(outdir, gb))])
-                    except (OSError, IOError) as e:
-                        _logger.warn(Text(e))
+                with SourceLine(binding, "glob", WorkflowException):
+                    for gb in aslist(binding["glob"]):
+                        gb = builder.do_eval(gb)
+                        if gb:
+                            globpatterns.extend(aslist(gb))
+
+                    for gb in globpatterns:
+                        if gb.startswith(outdir):
+                            gb = gb[len(outdir)+1:]
+                        elif gb == ".":
+                            gb = outdir
+                        elif gb.startswith("/"):
+                            raise WorkflowException("glob patterns must not start with '/'")
+                        try:
+                            r.extend([{"location": g,
+                                       "class": "File" if fs_access.isfile(g) else "Directory"}
+                                      for g in fs_access.glob(fs_access.join(outdir, gb))])
+                        except (OSError, IOError) as e:
+                            _logger.warn(Text(e))

             for files in r:
                 if files["class"] == "Directory" and "listing" not in files:
@@ -479,11 +482,13 @@ def collect_output(self, schema, builder, outdir, fs_access, compute_checksum=Tr
                 single = True

             if "outputEval" in binding:
-                r = builder.do_eval(binding["outputEval"], context=r)
+                with SourceLine(binding, "outputEval", WorkflowException):
+                    r = builder.do_eval(binding["outputEval"], context=r)

             if single:
                 if not r and not optional:
-                    raise WorkflowException("Did not find output file with glob pattern: '{}'".format(globpatterns))
+                    with SourceLine(binding, "glob", WorkflowException):
+                        raise WorkflowException("Did not find output file with glob pattern: '{}'".format(globpatterns))
                 elif not r and optional:
                     pass
                 elif isinstance(r, list):
@@ -498,20 +503,21 @@ def collect_output(self, schema, builder, outdir, fs_access, compute_checksum=Tr
                     Callable[[Any], Any], revmap))

             if "secondaryFiles" in schema:
-                for primary in aslist(r):
-                    if isinstance(primary, dict):
-                        primary["secondaryFiles"] = []
-                        for sf in aslist(schema["secondaryFiles"]):
-                            if isinstance(sf, dict) or "$(" in sf or "${" in sf:
-                                sfpath = builder.do_eval(sf, context=primary)
-                                if isinstance(sfpath, basestring):
-                                    sfpath = revmap({"location": sfpath, "class": "File"})
-                            else:
-                                sfpath = {"location": substitute(primary["location"], sf), "class": "File"}
-
-                            for sfitem in aslist(sfpath):
-                                if fs_access.exists(sfitem["location"]):
-                                    primary["secondaryFiles"].append(sfitem)
+                with SourceLine(schema, "secondaryFiles", WorkflowException):
+                    for primary in aslist(r):
+                        if isinstance(primary, dict):
+                            primary["secondaryFiles"] = []
+                            for sf in aslist(schema["secondaryFiles"]):
+                                if isinstance(sf, dict) or "$(" in sf or "${" in sf:
+                                    sfpath = builder.do_eval(sf, context=primary)
+                                    if isinstance(sfpath, basestring):
+                                        sfpath = revmap({"location": sfpath, "class": "File"})
+                                else:
+                                    sfpath = {"location": substitute(primary["location"], sf), "class": "File"}
+
+                                for sfitem in aslist(sfpath):
+                                    if fs_access.exists(sfitem["location"]):
+                                        primary["secondaryFiles"].append(sfitem)

         if not r and optional:
             r = None
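Below is a minimal sketch (not part of the patch) of the `SourceLine` wrapping pattern this change introduces, using only the `schema_salad.sourceline.SourceLine` and `schema_salad.validate.ValidationException` names already imported in the diff; the `tool` mapping is a hypothetical stand-in for a loaded CWL document.

```python
from schema_salad import validate
from schema_salad.sourceline import SourceLine

# Hypothetical tool fragment; documents loaded by schema_salad's loader carry
# line/column metadata, which lets SourceLine point at the offending key.
tool = {"stdout": "/absolute/path.txt"}

try:
    # Any exception raised inside the block is re-raised as the given
    # exception type, prefixed with the source position when available.
    with SourceLine(tool, "stdout", validate.ValidationException):
        if tool["stdout"].startswith("/"):
            raise validate.ValidationException(
                "stdout must be a relative path, got '%s'" % tool["stdout"])
except validate.ValidationException as err:
    print(err)
```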