-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathtdose.py
1390 lines (1217 loc) · 83.7 KB
/
tdose.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
import pdb
import time
import os
import sys
import glob
import numpy as np
import astropy
import shutil
import collections
import multiprocessing
from astropy import wcs
from astropy.coordinates import SkyCoord
from astropy import units
import astropy.convolution
from astropy.nddata import Cutout2D
import astropy.io.fits as afits
from reproject import reproject_interp
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
import tdose
import tdose_utilities as tu
import tdose_model_FoV as tmf
import tdose_model_cube as tmc
import tdose_extract_spectra as tes
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def perform_extraction(setupfile='./tdose_setup_template.txt',
                       performcutout=True,generatesourcecat=True,modelrefimage=True,refimagemodel2cubewcs=True,
                       definePSF=True,modeldatacube=True,createsourcecube=True,store1Dspectra=True,plot1Dspectra=True,
                       plotS2Nspectra=True,save_init_model_output=False,clobber=False,verbose=True,verbosefull=False,
                       logterminaloutput=False,skipextractedobjects=False,skipspecificobjects=None):
    """
    Perform extraction of spectra from data cube based on information in TDOSE setup file

    --- INPUT ---
    setupfile              TDOSE setup file. Template can be generated with tu.generate_setup_template()
    performcutout          To skip cutting out subcubes and images (i.e., if the cutouts have already been
                           generated and exist) set performcutout=False
    generatesourcecat      To skip generating the cutout source catalogs from the main source catalog of sources
                           to model (e.g., after editing the source catalog) set generatesourcecat=False
                           Note however, that these catalogs are needed to produce the full FoV source model cube with
                           tdose.gen_fullFoV_from_cutouts()
    modelrefimage          To skip modeling the reference image set modelrefimage=False
    refimagemodel2cubewcs  To skip converting the reference image model to the cube WCS system set refimagemodel2cubewcs=False
    definePSF              To skip generating the PSF definePSF=False
    modeldatacube          To skip modeling the data cube set modeldatacube=False
    createsourcecube       To skip creating the source model cube set createsourcecube=False
    store1Dspectra         To skip storing the 1D spectra to binary fits tables set store1Dspectra=False
    plot1Dspectra          Plot the 1D spectra after extracting them
    plotS2Nspectra         Plot signal-to-noise spectra after extracting the 1D spectra
    save_init_model_output If a SExtractor catalog is provided to the keyword gauss_guess in the setup file
                           an initial guess including the SExtractor fits is generated for the Gaussian model.
                           To save a ds9 region, image and parameter list (the two latter are available from the default
                           output of the TDOSE modeling) set save_init_model_output=True
    clobber                If True existing output files will be overwritten
    verbose                Toggle verbosity
    verbosefull            Toggle extended verbosity
    logterminaloutput      The setup file used for the run will be logged (copied to the spec1D_directory) automatically
                           for each TDOSE extraction. To also log the output from the terminal set logterminaloutput=True
                           In this case no TDOSE output will be passed to the terminal.
    skipextractedobjects   To skip modeling and extraction of objects which were already extracted, i.e. object IDs with
                           a matching 'spec1D_name'*.fits file in the 'spec1D_directory', set this keyword to True.
                           NB This keyword does not apply to the cutouts; to ignore this process use the
                           performcutout and generatesourcecat keywords.
                           NB Note that spectra extracted with parent ids will not be recognized and therefore skipped.
                           Hence only a standard TDOSE extraction will work in combination with skipextractedobjects
                           However, this generates all necessary models and files for post-modeling parent extractions.
    skipspecificobjects    In addition to skipextractedobjects to skip specific objects (irrespective of whether they
                           have already been extracted or not) provide a list of source IDs to this keyword.
                           The same cautions mentioned under skipextractedobjects apply to skipspecificobjects as well.

    --- EXAMPLE OF USE ---
    import tdose

    # full extraction with minimal text output to prompt
    tdose.perform_extraction(setupfile='./tdose_setup_candels-cdfs-02.txt',verbose=True,verbosefull=False)

    # only plotting:
    tdose.perform_extraction(setupfile='./tdose_setup_candels-cdfs-02.txt',performcutout=False,generatesourcecat=False,modelrefimage=False,refimagemodel2cubewcs=False,definePSF=False,modeldatacube=False,createsourcecube=False,store1Dspectra=False,plot1Dspectra=True,clobber=True,verbosefull=False)

    """
    # defining function within the routine that can be called by the output logger at the end
    def tdosefunction(setupfile,performcutout,generatesourcecat,modelrefimage,refimagemodel2cubewcs,
                      definePSF,modeldatacube,createsourcecube,store1Dspectra,plot1Dspectra,
                      plotS2Nspectra,save_init_model_output,clobber,verbose,verbosefull,skipextractedobjects,skipspecificobjects):
        # FIX: time.clock() was deprecated in Python 3.3 and removed in 3.8; use time.time()
        # (wall-clock, matching the "Total runtime" wording of the status messages).
        start_time = time.time()
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if verbose: print('==================================================================================================')
        if verbose: print(' TDOSE: Loading setup '+\
                          ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
        if verbosefull:
            verbose = True

        setupdic        = tu.load_setup(setupfile,verbose=verbose)

        sourcecat_init  = setupdic['source_catalog']
        sourcedat_init  = afits.open(sourcecat_init)[1].data
        sourcehdr_init  = afits.open(sourcecat_init)[1].header
        sourceids_init  = sourcedat_init[setupdic['sourcecat_IDcol']]

        Nsources        = len(sourceids_init)
        sourcenumber    = np.arange(Nsources)

        # FIX: the original tested "type(x) == np.str_ or type(x) == str"; np.str_ was removed
        # in NumPy 2.0 and np.str_ is a subclass of str anyway, so isinstance covers both.
        if isinstance(setupdic['sources_to_extract'], str):
            if setupdic['sources_to_extract'].lower() == 'all':
                extractids = sourceids_init.astype(float)
            else:
                # FIX: was setupdic[...].astype(str) which fails on a plain Python str; str() works for both.
                extractids = np.genfromtxt(str(setupdic['sources_to_extract']),dtype=None,comments='#')
                extractids = list(extractids.astype(float))
        else:
            extractids = setupdic['sources_to_extract']
        Nextractions = len(extractids)

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if verbose: print('==================================================================================================')
        if verbose: print(' TDOSE: Logging setup '+\
                          ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
        setuplog = setupdic['spec1D_directory']+setupfile.split('/')[-1].replace('.txt','_logged.txt')
        if os.path.isfile(setuplog) and (not clobber):
            if verbose: print(' - WARNING Logged setupfile exists and clobber = False. Not storing setup ')
        else:
            if verbose: print(' - Writing setup and command to spec1D_directory to log extraction setup and command that was run')
            setupinfo    = open(setupfile,'r')
            setupcontent = setupinfo.read()
            setupinfo.close()

            cmdthatwasrun = "import tdose; tdose.perform_extraction(setupfile='%s',performcutout=%s,generatesourcecat=%s,modelrefimage=%s," \
                            "refimagemodel2cubewcs=%s,definePSF=%s,modeldatacube=%s,createsourcecube=%s,store1Dspectra=%s," \
                            "plot1Dspectra=%s,plotS2Nspectra=%s,save_init_model_output=%s,clobber=%s,verbose=%s,verbosefull=%s," \
                            "logterminaloutput=%s)" % \
                            (setuplog,performcutout,generatesourcecat,modelrefimage,refimagemodel2cubewcs,definePSF,modeldatacube,
                             createsourcecube,store1Dspectra,plot1Dspectra,plotS2Nspectra,save_init_model_output,clobber,verbose,
                             verbosefull,logterminaloutput)

            loginfo = open(setuplog, 'w')
            loginfo.write("# The setup file appended below was run with the command: \n# "+cmdthatwasrun+
                          " \n# on "+tu.get_now_string()+'\n# ')
            loginfo.write(setupcontent)
            loginfo.close()
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if setupdic['model_cutouts']:
            if verbose: print('==================================================================================================')
            if verbose: print(' TDOSE: Generate cutouts around sources to extract '+\
                              ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
            tdose.gen_cutouts(setupdic,extractids,sourceids_init,sourcedat_init,
                              performcutout=performcutout,generatesourcecat=generatesourcecat,clobber=clobber,
                              verbose=verbose,verbosefull=verbosefull,start_time=start_time)
        else:
            if verbose: print('==================================================================================================')
            if verbose: print((' TDOSE: Model full FoV, i.e. no cutouts and incl. full source cat.'+
                               '( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )'))
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        if verbose: print('==================================================================================================')
        if verbose: print(' TDOSE: Defining and loading data for extractions '+\
                          ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
        Nloops    = 1
        loopnames = [-9999]         # Default: Extracting objects from full FoV.
                                    # If to be done in cutouts, loopnames will be replaced with individual IDs
        if setupdic['model_cutouts']:
            Nloops    = Nextractions
            loopnames = extractids

        if skipspecificobjects is None:
            skipidlist = []
        else:
            # FIX: loop variable renamed from "id" to avoid shadowing the builtin.
            skipidlist = [int(objid) for objid in skipspecificobjects]

        for oo, extid in enumerate(loopnames):
            if verbose:
                infostr = ' - Starting extraction for object '+str("%4.f" % (oo+1))+' / '+\
                          str("%4.f" % Nloops)+' with ID = '+str(extid)+'           '+tu.get_now_string()
            # NOTE(review): hoisted out of the "if verbose" guard — the per-object runtime
            # summary below uses it unconditionally.
            start_time_obj = time.time()

            imgstr, imgsize, refimg, datacube, variancecube, sourcecat = tu.get_datinfo(extid,setupdic)

            # Decide up front whether this object can/should be processed at all.
            if not os.path.isfile(datacube):
                infostr     = infostr+' -> skipping as datacube to extract from not found '
                skipthisobj = True
            else:
                if np.isfinite(afits.open(datacube)[setupdic['cube_extension']].data).any():
                    skipthisobj = False
                else:
                    infostr     = infostr+' -> skipping as no pixels in datacube are finite (all NaNs or Infs) '
                    skipthisobj = True

            if skipextractedobjects or (int(extid) in skipidlist):
                if int(extid) in skipidlist:
                    skipthisobj = True
                    infostr     = infostr+' -> skipping per request '
                else:
                    # Skip objects for which a 1D spectrum already exists on disk.
                    nameext2check = setupdic['spec1D_name']+'_'+setupdic['source_model']
                    id2check      = str("%.10d" % extid)
                    specdir2check = setupdic['spec1D_directory']
                    file2check    = specdir2check+nameext2check+'_'+id2check+'.fits'
                    if os.path.isfile(file2check):
                        skipthisobj = True
                        infostr     = infostr+' -> skipping as spectrum exists '
            else:
                infostr = infostr+'                                                   '

            if verbose:
                if verbosefull:
                    print(infostr)
                else:
                    sys.stdout.write("%s\r" % infostr)
                    sys.stdout.flush()

            if skipthisobj:
                continue

            if setupdic['wht_image'] is not None:
                refimg = refimg[0]  # get_datinfo returns (image, weight) when a weight image is set up

            # Load cube and reference image data + WCS info used throughout the modeling below.
            cube_data     = afits.open(datacube)[setupdic['cube_extension']].data
            cube_variance = afits.open(variancecube)[setupdic['variance_extension']].data
            cube_hdr      = afits.open(datacube)[setupdic['cube_extension']].header
            cube_wcs2D    = tu.WCS3DtoWCS2D(wcs.WCS(tu.strip_header(cube_hdr.copy())))
            cube_scales   = wcs.utils.proj_plane_pixel_scales(cube_wcs2D)*3600.0
            cube_waves    = np.arange(cube_hdr['NAXIS3'])*cube_hdr['CD3_3']+cube_hdr['CRVAL3']

            img_data      = afits.open(refimg)[setupdic['img_extension']].data
            img_hdr       = afits.open(refimg)[setupdic['img_extension']].header
            img_wcs       = wcs.WCS(tu.strip_header(img_hdr.copy()))
            img_scales    = wcs.utils.proj_plane_pixel_scales(img_wcs)*3600.0

            modelimg      = setupdic['models_directory']+'/'+\
                            refimg.split('/')[-1].replace('.fits','_'+setupdic['model_image_ext']+'_'+setupdic['source_model']+'.fits')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbosefull: print('--------------------------------------------------------------------------------------------------')
            FoV_modelexists = False
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if setupdic['source_model'] == 'galfit':
                if verbosefull: print(' Looking for galfit model of source ... ')
                model_file = setupdic['galfit_directory']+'galfit_'+\
                             setupdic['ref_image'].split('/')[-1].replace('.fits','_output.fits')
                if setupdic['model_cutouts']:
                    model_file = model_file.replace('.fits',imgstr+'.fits')

                if os.path.isfile(model_file):
                    if verbosefull: print(' -> found it, so it will be used')
                    FoV_modelexists = True
                    FoV_modelfile   = model_file
                    FoV_modeldata   = afits.open(FoV_modelfile)[setupdic['galfit_model_extension']].data
                else:
                    if verbosefull: print(' -> did not find it, so will generate gaussian TDOSE model')
                    sys.exit(' ---> Loading parameters and building model from galfit output is not enabled yet; sorry. '
                             'If you have the model try the source_model = modelimg setup')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if setupdic['source_model'] == 'modelimg':
                if verbosefull: print(' Looking for ref_image model of source in "modelimg_directory"... ')
                model_file = setupdic['modelimg_directory']+'model_'+\
                             setupdic['ref_image'].split('/')[-1]
                if setupdic['model_cutouts']:
                    model_file = model_file.replace('.fits',imgstr+'.fits')
                cube_model_file = model_file.replace('.fits','_cube.fits')

                if os.path.isfile(cube_model_file):
                    if verbosefull: print(' -> found a cube model, so will use that (instead of any model files)')
                    FoV_modelexists = True
                    FoV_modelfile   = cube_model_file
                    FoV_modeldata   = afits.open(FoV_modelfile)[setupdic['modelimg_extension']].data
                elif os.path.isfile(model_file):
                    if verbosefull: print(' -> found it, so it will be used')
                    FoV_modelexists = True
                    FoV_modelfile   = model_file
                    FoV_modeldata   = afits.open(FoV_modelfile)[setupdic['modelimg_extension']].data
                else:
                    if verbosefull: print(' -> did not find any model or cube model:\n '+model_file+'\n '+cube_model_file+\
                                          '\n so will skip object '+str(extid))
                    continue

                # FIX: was "if FoV_modeldata == None:" wrapped in a bare try/except; comparing a
                # NumPy array to None element-wise made the guard unreliable. "is None" is exact.
                if FoV_modeldata is None:
                    print(('\n WARNING - No model data found in extension '+
                           str(setupdic['modelimg_extension'])+' of '+FoV_modelfile+'\n'))
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if not FoV_modelexists:
                if verbosefull: print(' TDOSE: Model reference image '+\
                                      ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
                regionfile = setupdic['models_directory']+'/'+\
                             refimg.split('/')[-1].replace('.fits','_'+setupdic['model_param_reg']+'_'+setupdic['source_model']+'.reg')
                modelparam = modelimg.replace('.fits','_objparam.fits') # output from reference image modeling

                # Build the display names ("parent>>id" when a parent ID column is set up).
                names     = []
                sourceids = afits.open(sourcecat)[1].data[setupdic['sourcecat_IDcol']]
                for ii, sid in enumerate(sourceids):
                    if setupdic['sourcecat_parentIDcol'] is not None:
                        parentid = afits.open(sourcecat)[1].data[setupdic['sourcecat_parentIDcol']][ii]
                        namestr  = str(parentid)+'>>'+str(sid)
                    else:
                        namestr  = str(sid)
                    names.append(namestr)

                # Determine whether a central point source should be added for non-detections.
                centralpointsource = False # default value of centralpointsource
                if setupdic['nondetections'] is None:
                    centralpointsource = False
                elif isinstance(setupdic['nondetections'], str):  # FIX: np.str_ removed in NumPy 2.0
                    if setupdic['nondetections'].lower() == 'all':
                        centralpointsource = True
                    else:
                        nondetids = np.genfromtxt(setupdic['nondetections'],dtype=None,comments='#')
                        nondetids = list(nondetids.astype(float))
                        if extid in nondetids:
                            centralpointsource = True
                else:
                    if extid in setupdic['nondetections']:
                        centralpointsource = True
                if centralpointsource:
                    if verbosefull: print(' - Object in list of non-detections. Adjusting model to contain central point source ')

                if modelrefimage:
                    tdose.model_refimage(setupdic,refimg,img_hdr,sourcecat,modelimg,modelparam,regionfile,img_wcs,img_data,names,
                                         save_init_model_output=save_init_model_output,centralpointsource=centralpointsource,
                                         clobber=clobber,verbose=verbose,verbosefull=verbosefull,objid=extid)
                else:
                    if verbose: print(' >>> Skipping modeling reference image (assume models exist)')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbosefull: print('--------------------------------------------------------------------------------------------------')
            if verbosefull: print(' TDOSE: Convert ref. image model to cube WCS '+\
                                  ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
            cubewcsimg = setupdic['models_directory']+'/'+\
                         refimg.split('/')[-1].replace('.fits','_'+setupdic['model_image_cube_ext']+'_'+
                                                       setupdic['source_model']+'.fits')
            if not FoV_modelexists:
                # TDOSE gaussian/aperture model: convert the parameter array to the cube WCS frame.
                paramREF  = tu.build_paramarray(modelparam,verbose=verbosefull)
                paramCUBE = tu.convert_paramarray(paramREF,img_hdr,cube_hdr,type=setupdic['source_model'].lower(),verbose=verbosefull)
            elif FoV_modelexists:
                modelimgsize = model_file

            if refimagemodel2cubewcs:
                # Build a 2D header with CD keywords (instead of astropy's default PC keywords)
                # matching the spatial WCS of the data cube.
                cubehdu    = afits.PrimaryHDU(cube_data[0,:,:])
                cubewcshdr = cube_wcs2D.to_header()
                for key in cubewcshdr:
                    if key == 'PC1_1':
                        cubehdu.header.append(('CD1_1',cubewcshdr[key],cubewcshdr[key]),end=True)
                    elif key == 'PC2_2':
                        cubehdu.header.append(('CD2_2',cubewcshdr[key],cubewcshdr[key]),end=True)
                    else:
                        cubehdu.header.append((key,cubewcshdr[key],cubewcshdr[key]),end=True)

                if not FoV_modelexists:
                    modelimgsize = cube_data.shape[1:]
                else:
                    # Collapse a cube model to an image before reprojecting to the cube WCS.
                    if len(FoV_modeldata.shape) == 2:
                        FoV_modeldata_reproject = FoV_modeldata
                    elif len(FoV_modeldata.shape) == 3:
                        FoV_modeldata_reproject = np.sum(FoV_modeldata, axis=0)
                    else:
                        sys.exit(' ---> Shape of model data array is not 2 (image) or 3 (cube) ')

                    projected_image, footprint = reproject_interp( (FoV_modeldata_reproject, img_wcs), cube_wcs2D,
                                                                   shape_out=cube_data.shape[1:])
                    projected_image[np.isnan(projected_image)] = 0.0 # replacing NaNs from reprojection with 0s
                    paramCUBE = projected_image/np.sum(projected_image)*np.sum(FoV_modeldata) # normalize and scale to match FoV_modeldata

                tmf.save_modelimage(cubewcsimg,paramCUBE,modelimgsize,modeltype=setupdic['source_model'].lower(),
                                    param_init=False,clobber=clobber,outputhdr=cubehdu.header,verbose=verbosefull)

                if FoV_modelexists:
                    if (len(FoV_modeldata.shape) == 3):
                        # Reproject each component of the model cube individually (normalized per component).
                        paramCUBE = np.zeros([FoV_modeldata.shape[0],cube_data.shape[1],cube_data.shape[2]])
                        if verbose: print(' - Reprojecting and normalizing individual components in object model cube to use for extraction ')
                        for component in np.arange(int(FoV_modeldata.shape[0])):
                            projected_comp, footprint_comp = reproject_interp( (FoV_modeldata[component,:,:], img_wcs), cube_wcs2D,
                                                                               shape_out=cube_data.shape[1:])
                            projected_comp[np.isnan(projected_comp)] = 0.0
                            paramCUBE[component,:,:] = projected_comp/ np.sum(projected_comp)
            else:
                if verbose: print(' >>> Skipping converting reference image model to cube WCS frame (assume models exist)')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbosefull: print('--------------------------------------------------------------------------------------------------')
            if verbosefull: print(' TDOSE: Defining PSF as FWHM = p0 + p1(lambda-'+str(setupdic['psf_FWHMp2'])+'A) '+\
                                  ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
            if definePSF or modeldatacube:
                if setupdic['source_model'] == 'aperture':
                    if verbose: print(' >>> Skipping defining PSF as source_model = "aperture", i.e., convolution of ref. image model')
                    paramPSF = None
                else:
                    paramPSF = tdose.define_psf(setupdic,datacube,cube_data,cube_scales,cube_hdr,cube_waves,
                                                clobber=clobber,verbose=verbose,verbosefull=verbosefull)
            else:
                if verbose: print(' >>> Skipping defining PSF of data cube (assume it is defined)')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbosefull: print('--------------------------------------------------------------------------------------------------')
            if verbosefull: print(' TDOSE: Modelling data cube '+\
                                  ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
            modcubename = setupdic['models_directory']+'/'+\
                          datacube.split('/')[-1].replace('.fits','_'+setupdic['model_cube_ext']+'_'+setupdic['source_model']+'.fits')
            rescubename = setupdic['models_directory']+'/'+\
                          datacube.split('/')[-1].replace('.fits','_'+setupdic['residual_cube_ext']+'_'+setupdic['source_model']+'.fits')
            psfcubename = setupdic['models_directory']+'/'+datacube.split('/')[-1].replace('.fits','_tdose_psfcube_'+
                                                                                          setupdic['source_model']+'.fits')
            if modeldatacube:
                tdose.model_datacube(setupdic,extid,modcubename,rescubename,cube_data,cube_variance,paramCUBE,cube_hdr,paramPSF,
                                     psfcubename=psfcubename,clobber=clobber,verbose=verbose,verbosefull=verbosefull)
            else:
                if verbose: print(' >>> Skipping modeling of data cube (assume it exists)')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            sourcecubename = setupdic['models_directory']+'/'+\
                             datacube.split('/')[-1].replace('.fits','_'+setupdic['source_model_cube_ext']+'_'+
                                                             setupdic['source_model']+'.fits')
            if createsourcecube:
                if verbosefull: print('--------------------------------------------------------------------------------------------------')
                if verbosefull: print(' TDOSE: Creating source model cube '+\
                                      ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
                model_cube   = afits.open(modcubename)[setupdic['cube_extension']].data
                layer_scales = afits.open(modcubename)['WAVESCL'].data

                if setupdic['source_model'].lower() != 'aperture':
                    psfcube = afits.open(psfcubename)[setupdic['cube_extension']].data
                else:
                    psfcube = None

                source_model_cube = tmc.gen_source_model_cube(layer_scales,model_cube.shape,paramCUBE,paramPSF,
                                                              psfcube=psfcube,paramtype=setupdic['source_model'],
                                                              psfparamtype=setupdic['psf_type'],save_modelcube=True,
                                                              cubename=sourcecubename,clobber=clobber,outputhdr=cube_hdr,
                                                              verbose=verbosefull)
            else:
                if verbose: print(' >>> Skipping generating source model cube (assume it exists)')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbose: print('==================================================================================================')
            if verbose: print(' TDOSE: Storing extracted 1D spectra to files '+\
                              ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
            specoutputdir      = setupdic['spec1D_directory']
            model_cube_file    = modcubename
            variance_cube_file = variancecube
            variance_cube_ext  = setupdic['variance_extension']
            smc_file           = sourcecubename
            smc_ext            = setupdic['cube_extension']

            # - - - - - - - - - - - - Putting together source association dictionary - - - - - - - - - - - - -
            SAD = collections.OrderedDict()
            if FoV_modelexists:
                sourceids = afits.open(sourcecat)[1].data[setupdic['sourcecat_IDcol']]

            if extid == -9999: # If full FoV is modeled
                if setupdic['sources_to_extract'] == 'all':
                    for ss, sid in enumerate(extractids):
                        SAD[str("%.10d" % int(sid))] = [ss]
                else:
                    if setupdic['sourcecat_parentIDcol'] is not None:
                        parentids = afits.open(sourcecat)[1].data[setupdic['sourcecat_parentIDcol']]
                        for ee, eid in enumerate(extractids):
                            sourceent = np.where(sourceids == eid)[0]
                            parent    = parentids[sourceent][0]
                            if float(parent) < 0: # ignoring parents with negative IDs. Looking for object IDs in parent list instead
                                sourceent = np.where(parentids == eid)[0]
                                parent    = parentids[sourceent][0]
                            groupent = np.where(parentids == parent)[0]
                            # FIX: the original re-bound SAD to a fresh dict here ("SAD = {...}"),
                            # discarding the entries accumulated in earlier loop iterations so only
                            # the last extraction ID survived. Assigning the key (as the cutout
                            # branch below does) keeps one entry per extraction ID.
                            SAD[str("%.10d" % int(parent))+'-'+str("%.10d" % int(eid))] = groupent.tolist()
                    else:
                        for ss, sid in enumerate(extractids):
                            SAD[str("%.10d" % int(sid))] = [ss]
            else: # If cutouts are modeled instead of full FoV
                if setupdic['sourcecat_parentIDcol'] is not None:
                    parentids = afits.open(sourcecat)[1].data[setupdic['sourcecat_parentIDcol']]
                    sourceent = np.where(sourceids == extid)[0]
                    parent    = parentids[sourceent][0]
                    if float(parent) < 0: # ignoring parents with negative IDs. Looking for object IDs in parent list instead
                        sourceent = np.where(parentids == extid)[0]
                        parent    = parentids[sourceent][0]
                    groupent = np.where(parentids == parent)[0]
                    SAD[str("%.10d" % int(parent))+'-'+str("%.10d" % int(extid))] = groupent.tolist()
                else:
                    sourceent = np.where(sourceids == extid)[0]
                    SAD[str("%.10d" % int(extid))] = sourceent.tolist()
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if store1Dspectra:
                specfiles = tes.extract_spectra(model_cube_file,model_cube_ext=setupdic['cube_extension'],
                                                layer_scale_ext='WAVESCL',clobber=clobber,
                                                nameext=setupdic['spec1D_name']+'_'+setupdic['source_model'],
                                                source_association_dictionary=SAD,outputdir=specoutputdir,
                                                variance_cube_file=variance_cube_file,variance_cube_ext=variance_cube_ext,
                                                source_model_cube_file=smc_file,source_cube_ext=smc_ext,
                                                data_cube_file=datacube,verbose=verbosefull)
            else:
                if verbose: print(' >>> Skipping storing 1D spectra to binary fits tables ')
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbose: print('==================================================================================================')
            if verbose: print(' TDOSE: Plotting extracted spectra '+\
                              ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
            if setupdic['plot_generate']:
                tdose.plot_spectra(setupdic,SAD,specoutputdir,plot1Dspectra=plot1Dspectra,plotS2Nspectra=plotS2Nspectra,
                                   verbose=verbosefull)
                objids = list(SAD.keys())
                tu.gen_overview_plot(objids,setupfile,verbose=verbosefull)
            # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            if verbose:
                print('==================================================================================================')
                print(' TDOSE: Modeling and extraction done for object '+str(extid)+\
                      ' ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds ) ')
                print('                                               '+\
                      ' --> Object runtime = '+str("%10.4f" % (time.time() - start_time_obj))+' seconds <-- ')
                print(' - To open all generated files in DS9 execute the following command ')
                # Assemble a ds9 command opening every product that was actually written to disk.
                ds9cmd  = ' ds9 '
                Nframes = 0
                if os.path.isfile(refimg):
                    ds9cmd  = ds9cmd+refimg+' '
                    Nframes = Nframes + 1
                    if os.path.isfile(setupdic['source_catalog'].replace('.fits','.reg')):
                        ds9cmd = ds9cmd+' -region '+setupdic['source_catalog'].replace('.fits','.reg')+' '
                if os.path.isfile(modelimg):
                    ds9cmd  = ds9cmd+modelimg
                    Nframes = Nframes + 1
                    if os.path.isfile(regionfile):
                        ds9cmd = ds9cmd+' -region '+regionfile+' '
                if os.path.isfile(modelimg.replace('.fits','_residual.fits')):
                    ds9cmd  = ds9cmd+modelimg.replace('.fits','_residual.fits')+' '
                    Nframes = Nframes + 1
                if os.path.isfile(cubewcsimg):
                    ds9cmd  = ds9cmd+cubewcsimg+' '
                    Nframes = Nframes + 1
                if os.path.isfile(datacube):
                    ds9cmd  = ds9cmd+datacube+' '
                    Nframes = Nframes + 1
                if os.path.isfile(modcubename):
                    ds9cmd  = ds9cmd+modcubename+' '
                    Nframes = Nframes + 1
                if os.path.isfile(rescubename):
                    ds9cmd  = ds9cmd+rescubename+' '
                    Nframes = Nframes + 1
                if os.path.isfile(sourcecubename):
                    ds9cmd  = ds9cmd+sourcecubename+' '
                    Nframes = Nframes + 1
                ds9cmd = ds9cmd+'-lock frame wcs -tile grid layout '+str(Nframes)+' 1 &'
                print(ds9cmd)
                print('==================================================================================================')
        if verbose:
            print("""
                                   .''.
                       .''.      *''*    :_\/_:     .
                      :_\/_:   . .:.*_\/_*   : /\ :  .'.:.'.
                  .''.: /\ : _\(/_  ':'* /\ *  : '..'.  -=:o:=-
                 :_\/_:'.:::. /)\*''*  .|.* '.\'/.'_\(/_'.':'.'
                 : /\ : :::::  '*_\/_* | |  -= o =- /)\    '  *
                  '..'  ':::'   * /\ * |'|  .'/.\'.  '._____
                      *        __*..* |  |     :      |.   |' .---|
                       _*   .-'   '-. |  |     .--'|  ||   | _|   |
                    .-'|  _.|  |    ||   '-__  |   |  |    ||     |
                    |' | |.    |    ||       | |   |  |    ||     |
                 ___|  '-'     '    ""       '-'   '-.'    '`     |____
                        _________               _____     ______
                       |__    __||             |  __ \\\\   / __ \\\\  / ____\\\\ |  ___||
                          |  ||  | ||  \ \\\\ | || | ||  \ \\\\__   | ||__
                          |  ||  | ||  | || | || | ||   \__ \\\\  |  __||
                          |  ||  | ||_/ //  | ||_| ||  ___\ \\\\ | ||___
                          |__||  |_____//    \_____//  |______// |_____||
                        _____     ______   __    __  ______   ___
                       |  __ \\\\  / __ \\\\ |  \\\\ |  || |  ___|| |  ||
                       | ||  \ \\\\ | || | || |   \\\\|  || | ||__    | ||
                       | ||  | || | || | || |  ||  |  __||  |__||
                       | ||_/ //  | ||_| || | ||\  ||  | ||___    __
                       |_____//    \_____//  |__|| \__|| |_____||  |__||
            ==================================================================================================
                  """)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if logterminaloutput:
        setupdic  = tu.load_setup(setupfile,verbose=False)
        outputlog = setupdic['spec1D_directory']+setupfile.split('/')[-1].replace('.txt','_logged_output.txt')
        # FIX: the original used open(outputlog, 'a', 0); zero (unbuffered) is invalid for
        # text-mode files in Python 3 and raises ValueError. Line buffering (1) keeps the
        # log current while remaining valid.
        f = open(outputlog, 'a', 1)
        sys.stdout = f
        f.write('\n\n\n\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> LOG FROM RUN STARTED ON '+tu.get_now_string()+
                ' <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n\n')
        try:
            tdosefunction(setupfile,performcutout,generatesourcecat,modelrefimage,refimagemodel2cubewcs,
                          definePSF,modeldatacube,createsourcecube,store1Dspectra,plot1Dspectra,
                          plotS2Nspectra,save_init_model_output,clobber,verbose,verbosefull,
                          skipextractedobjects,skipspecificobjects)
        finally:
            # FIX: restore stdout and close the log even if the extraction raises, so the
            # interpreter is not left writing to a closed/redirected stream.
            sys.stdout = sys.__stdout__
            f.close()
    else:
        tdosefunction(setupfile,performcutout,generatesourcecat,modelrefimage,refimagemodel2cubewcs,
                      definePSF,modeldatacube,createsourcecube,store1Dspectra,plot1Dspectra,
                      plotS2Nspectra,save_init_model_output,clobber,verbose,verbosefull,
                      skipextractedobjects,skipspecificobjects)
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def perform_extractions_in_parallel(setupfiles,Nsessions=0,verbose=True,generateFullFoVmodel=True,generateOverviewPlots=True,
                                    # - - - - - - - Inputs passed to tdose.perform_extraction() - - - - - - -
                                    performcutout=True,generatesourcecat=True,modelrefimage=True,refimagemodel2cubewcs=True,
                                    definePSF=True,modeldatacube=True,createsourcecube=True,store1Dspectra=True,plot1Dspectra=True,
                                    plotS2Nspectra=True,save_init_model_output=False,clobber=False,verbosePE=True,verbosefull=False,
                                    logterminaloutput=True,skipextractedobjects=False,skipspecificobjects=None):
    """
    Run multiple TDOSE setups in parallel

    --- INPUT ---
    setupfiles            List of setup files to run in parallel
    Nsessions             The number of parallel sessions to launch (the list of setupfiles will be bundled up in
                          Nsessions bundles to run). The default is 0 which will run Nsetupfiles sessions with
                          1 setup file per parallel session.
    verbose               Toggle verbosity of this wrapper
    generateFullFoVmodel  Combine cutouts (if the run is based on cutouts) into full FoV model cube with
                          tdose.gen_fullFoV_from_cutouts()
    generateOverviewPlots Generate overview plots of each of the extracted objects with tu.gen_overview_plot()
    verbosePE             Verbosity passed on to the individual tdose.perform_extraction() calls
    **remaining input**   Input passed to tdose.perform_extraction();
                          see tdose.perform_extraction() header for details

    --- OUTPUT ---
    bundles               Ordered dictionary mapping 'bundleNoX' -> list of setup files run in that session
    dict of results       Copy of the multiprocessing manager dictionary (currently unpopulated by the workers)

    --- EXAMPLE OF USE ---
    import tdose, glob
    setupfiles = ['setup01','setup02','setup03','setup04','setup05','setup06','setup07','setup08','setup09']
    setupfiles = glob.glob('/Users/kschmidt/work/TDOSE/tdose_setup_candels-cdfs-*[0-99].txt')
    bundles, paralleldic = tdose.perform_extractions_in_parallel(setupfiles,Nsessions=2,clobber=True,performcutout=False,store1Dspectra=False,plot1Dspectra=False,generateFullFoVmodel=False,generateOverviewPlots=True,skipextractedobjects=True,logterminaloutput=True)
    """
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def parallel_worker(setupfiles,performcutout,generatesourcecat,modelrefimage,refimagemodel2cubewcs,definePSF,
                        modeldatacube,createsourcecube,store1Dspectra,plot1Dspectra,plotS2Nspectra,
                        save_init_model_output,clobber,verbose,verbosefull,logterminaloutput,
                        generateFullFoVmodel=True,generateOverviewPlots=True,skipextractedobjects=False,skipspecificobjects=None):
        """
        Multiprocessing worker function: run the extraction (and optional post-processing)
        for every setup file in the bundle handed to this process.
        """
        for setupfile in setupfiles:
            tdose.perform_extraction(setupfile=setupfile,performcutout=performcutout,generatesourcecat=generatesourcecat,
                                     modelrefimage=modelrefimage,refimagemodel2cubewcs=refimagemodel2cubewcs,
                                     definePSF=definePSF,modeldatacube=modeldatacube,createsourcecube=createsourcecube,
                                     store1Dspectra=store1Dspectra,plot1Dspectra=plot1Dspectra,plotS2Nspectra=plotS2Nspectra,
                                     save_init_model_output=save_init_model_output,clobber=clobber,
                                     verbose=verbose,verbosefull=verbosefull,logterminaloutput=logterminaloutput,
                                     skipextractedobjects=skipextractedobjects,skipspecificobjects=skipspecificobjects)
            # Bug fix: the post-processing steps below were outside the loop, so for bundles with
            # more than one setup file only the last file got a full-FoV model and overview plots.
            if generateFullFoVmodel:
                tdose.gen_fullFoV_from_cutouts(setupfile,clobber=clobber)
            if generateOverviewPlots:
                tu.gen_overview_plot('all',setupfile)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    bundles = collections.OrderedDict()
    Nsetups = len(setupfiles)
    if (type(Nsessions) is not int) or (Nsessions < 0):
        # Bug fix: cast to str() before concatenating; the original raised a TypeError
        # (instead of this message) whenever Nsessions was not itself a string.
        sys.exit(' ---> Nsessions must be a positive integer; it was not: '+str(Nsessions))
    if Nsessions == 0:
        # Default: one parallel session per setup file
        Nbundle = Nsetups
        for ii in np.arange(int(Nbundle)):
            string = 'bundleNo'+str(ii+1)
            bundles[string] = [setupfiles[ii]]
    else:
        Nbundle = int(Nsessions)
        bundlesize = int(np.ceil(float(Nsetups)/float(Nbundle)))
        for ii in np.arange(int(Nbundle)):
            string = 'bundleNo'+str(ii+1)
            if ii == Nbundle-1: # Last bundle picks up all remaining setup files
                # Bug fix: the original tested "ii == Nbundle" which is never true for
                # ii in [0, Nbundle-1]; the resulting slices are identical because
                # bundlesize = ceil(Nsetups/Nbundle), but the intent is restored here.
                bundles[string] = setupfiles[ii*bundlesize:]
            else:
                bundles[string] = setupfiles[bundlesize*ii:bundlesize*(ii+1)]
    if verbose: print(' - Found '+str(Nsetups)+' setup files to bundle up and run '+str(Nbundle)+' parallel sessions for')
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if verbose: print(' ---- Starting multiprocess parallel run of the '+str(Nsetups)+' TDOSE setups ---- ')
    tstart = tu.get_now_string(withseconds=True)
    mngr = multiprocessing.Manager() # initialize Manager too keep track of worker function output
    return_dict = mngr.dict() # define Manager dictionary to store output from Worker function in
    jobs = []
    for ii in np.arange(int(Nbundle)):
        bundlekey = 'bundleNo'+str(ii+1)
        if len(bundles[bundlekey]) == 1:
            # Single-file bundle: name the job after the setup file for readability
            jobname = bundles[bundlekey][0].split('/')[-1]
        else:
            jobname = bundlekey
        # Bug fix: pass verbosePE (the per-extraction verbosity documented in the header) to
        # the workers; the original passed the wrapper's own "verbose", leaving verbosePE unused.
        job = multiprocessing.Process(target=parallel_worker,
                                      args = (bundles[bundlekey],performcutout,generatesourcecat,modelrefimage,
                                              refimagemodel2cubewcs,definePSF,modeldatacube,createsourcecube,store1Dspectra,
                                              plot1Dspectra,plotS2Nspectra,save_init_model_output,clobber,
                                              verbosePE,verbosefull,logterminaloutput,
                                              generateFullFoVmodel,generateOverviewPlots,skipextractedobjects,skipspecificobjects),
                                      name = jobname)
        jobs.append(job)
        job.start()
    for job in jobs:
        job.join() # wait until all jobs have finished
    tend = tu.get_now_string(withseconds=True)
    if verbose:
        print('\n ---- The perform_extractions_in_parallel finished running the jobs for all TDOSE setups ----')
        print('      Start        : '+tstart)
        print('      End          : '+tend)
        print('      Exitcode = 0 : job produced no error ')
        print('      Exitcode > 0 : job had an error, and exited with that code (signal.SIGTERM)')
        print('      Exitcode < 0 : job was killed with a signal of -1 * exitcode (signal.SIGTERM)')
        for job in jobs:
            print(' - The job running field ',job.name,' exited with exitcode: ',job.exitcode)
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if verbose: print(' - Adding output from parallelized run to dictionary')
    outputdic = {} # renamed from "dict" to avoid shadowing the builtin
    for key in list(return_dict.keys()):
        outputdic[key] = return_dict[key] # filling dictionary
    return bundles, outputdic
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def gen_cutouts(setupdic,extractids,sourceids_init,sourcedat_init,
                performcutout=True,generatesourcecat=True,clobber=False,verbose=True,verbosefull=True,start_time=0.0,
                check4modelcat=True):
    """
    Generate cutouts of reference image and data cube

    --- INPUT ---
    setupdic          Dictionary containing the setup parameters read from the TDOSE setup file
    extractids        IDs of objects to extract spectra for
    sourceids_init    The initial source IDs
    sourcedat_init    The initial source data
    performcutout     Set to true to actually perform cutouts.
    generatesourcecat To generate a (sub) source catalog corresponding to the objects in the cutout
    clobber           Overwrite existing files
    verbose           Toggle verbosity
    verbosefull       Toggle extended verbosity
    start_time        Start time of wrapper cutout generation is embedded in
    check4modelcat    Look for pre-existing source catalogs in "modelimg_directory" before
                      falling back to an (ra,dec) match of the main "source_catalog"
    """
    Nextractions = len(extractids)
    cut_images   = []   # collected cutout reference-image names (used for the DS9 command below)
    for oo, cutoutid in enumerate(extractids):
        if verbose:
            infostr = ' - Cutting out object '+str("%4.f" % (oo+1))+' / '+\
                      str("%4.f" % Nextractions)+' with ID = '+str(cutoutid)+'           '+tu.get_now_string()
            if verbosefull:
                print(infostr)
            else:
                sys.stdout.write("%s\r" % infostr)
                sys.stdout.flush()
        objent = np.where(sourceids_init == cutoutid)[0]
        if len(objent) != 1:
            sys.exit(' ---> More than one (or no) match in source catalog to ID '+str(cutoutid))
        ra  = sourcedat_init[setupdic['sourcecat_racol']][objent]
        dec = sourcedat_init[setupdic['sourcecat_deccol']][objent]
        cutstr, cutoutsize, cut_img, cut_cube, cut_variance, cut_sourcecat = tu.get_datinfo(cutoutid,setupdic)
        if setupdic['wht_image'] is None:
            imgfiles = [setupdic['ref_image']]
            imgexts  = [setupdic['img_extension']]
            cut_img  = [cut_img]
        else:
            imgfiles = [setupdic['ref_image'],setupdic['wht_image']]
            imgexts  = [setupdic['img_extension'],setupdic['wht_extension']]
        cut_images.append(cut_img[0])
        if performcutout:
            if setupdic['data_cube'] == setupdic['variance_cube']:
                # Data and variance live in the same cube file: cut both extensions in one go
                cutouts = tu.extract_subcube(setupdic['data_cube'],ra,dec,cutoutsize,cut_cube,
                                             cubeext=[setupdic['cube_extension'],setupdic['variance_extension']],clobber=clobber,
                                             imgfiles=imgfiles,imgexts=imgexts,
                                             imgnames=cut_img,verbose=verbosefull)
            else:
                # Separate variance cube: cut the data cube (with images) and the variance cube individually
                cutouts = tu.extract_subcube(setupdic['data_cube'],ra,dec,cutoutsize,cut_cube,
                                             cubeext=[setupdic['cube_extension']],clobber=clobber,
                                             imgfiles=imgfiles,imgexts=imgexts,
                                             imgnames=cut_img,verbose=verbosefull)
                cutouts = tu.extract_subcube(setupdic['variance_cube'],ra,dec,cutoutsize,cut_variance,
                                             cubeext=[setupdic['variance_extension']],clobber=clobber,
                                             imgfiles=None,imgexts=None,imgnames=None,verbose=verbosefull)
        else:
            if verbose: print(' >>> Skipping cutting out images and cubes (assuming they exist) ')
            cutouts = 'dummy'
        # --- SUB-SOURCE CAT ---
        # Using "and" rather than the original bitwise "&"; identical result for booleans
        # but with the conventional short-circuit semantics.
        if generatesourcecat and (cutouts is not None):
            foundmodelcat = False
            if check4modelcat:
                if setupdic['modelimg_directory'] is not None:
                    if not performcutout: # only print if info from cutting out is not printed
                        print(' - Looking for source catalogs in the "modelimg_directory" ')
                    checkstring = 'noModelComponent'
                    if checkstring in cut_sourcecat:
                        print((' - '+checkstring+' source catalog; using (ra,dec) match to "source_catalog" instead'))
                    else:
                        model_sourcecat_str = setupdic['modelimg_directory']+'/*'+\
                                              cut_sourcecat.split('_id')[-1].replace('.fits','_sourcecatalog.fits')
                        model_sourcecat = glob.glob(model_sourcecat_str)
                        if len(model_sourcecat) == 1:
                            if not performcutout: # only print if info from cutting out is not printed
                                # Typo fix: message read "unqie" in the original
                                print(' Found a unique match for the objects -> using it instead of a (ra,dec) match to "source_catalog" ')
                            shutil.copyfile(model_sourcecat[0],cut_sourcecat)
                            foundmodelcat = True
                        elif len(model_sourcecat) > 1:
                            if not performcutout: # only print if info from cutting out is not printed
                                print((' Found '+str(len(model_sourcecat))+
                                       ' matches for the object -> using (ra,dec) match to "source_catalog" instead'))
                        else:
                            if not performcutout: # only print if info from cutting out is not printed
                                print(' Did not find any generating cutout source catalog from (ra,dec) match to "source_catalog" ')
            if not foundmodelcat:
                if not performcutout: # only print if info from cutting out is not printed
                    print(' - Generating cutout source catalog from (ra,dec) match to main "source_catalog" ')
                # Select all sources falling inside the cutout FoV; the RA half-width is
                # corrected by cos(dec) to convert the angular size to an RA interval.
                obj_in_cut_fov = np.where( (sourcedat_init[setupdic['sourcecat_racol']] <
                                            (ra + cutoutsize[0]/2./3600. / np.cos(np.deg2rad(dec)))) &
                                           (sourcedat_init[setupdic['sourcecat_racol']] >
                                            (ra - cutoutsize[0]/2./3600. / np.cos(np.deg2rad(dec)))) &
                                           (sourcedat_init[setupdic['sourcecat_deccol']] < (dec + cutoutsize[1]/2./3600.)) &
                                           (sourcedat_init[setupdic['sourcecat_deccol']] > (dec - cutoutsize[1]/2./3600.)) )[0]
                Ngoodobj   = len(obj_in_cut_fov)
                cutout_hdr = afits.open(cut_images[oo])[setupdic['img_extension']].header
                cut_sourcedat = sourcedat_init[obj_in_cut_fov].copy()
                # NOTE(review): passing the FITS column definitions as the numpy dtype here relies
                # on np.zeros coercing them; confirm against the astropy/numpy versions in use.
                storearr = np.zeros(Ngoodobj,dtype=cut_sourcedat.columns) # define structure array to store to fits file
                for ii in np.arange(Ngoodobj):
                    striphdr = tu.strip_header(cutout_hdr.copy())
                    wcs_in   = wcs.WCS(striphdr)
                    skycoord = SkyCoord(cut_sourcedat[ii][setupdic['sourcecat_racol']],
                                        cut_sourcedat[ii][setupdic['sourcecat_deccol']], frame='fk5', unit='deg')
                    # origin=1 -> FITS-style (1-indexed) pixel coordinates
                    pixcoord = wcs.utils.skycoord_to_pixel(skycoord,wcs_in,origin=1)
                    cut_sourcedat[ii][setupdic['sourcecat_xposcol']] = pixcoord[0]
                    cut_sourcedat[ii][setupdic['sourcecat_yposcol']] = pixcoord[1]
                    storearr[ii] = np.vstack(np.asarray(cut_sourcedat))[ii,:]
                # Consistency fix: use the afits alias (as everywhere else in this routine)
                # instead of the fully qualified astropy.io.fits.writeto
                afits.writeto(cut_sourcecat,storearr,header=None,overwrite=clobber)
        else:
            if verbose: print(' >>> Skipping generating the cutout source catalog for '+str(cutoutid)+' ')
    if not verbosefull:
        if verbose: print('\n   done')
    if verbose:
        print('==================================================================================================')
        # Bug fix: time.clock() was removed in Python 3.8; use time.time() for the elapsed wall-clock time
        print(' TDOSE: Done cutting out sub cubes and postage stamps '+\
              '                                       ( Total runtime = '+str("%10.4f" % (time.time() - start_time))+' seconds )')
        print(' - To open resulting images in DS9 execute the following command ')
        ds9string = ' ds9 '+setupdic['ref_image']+' xxregionxx '+\
                    ' '.join(cut_images)+' -lock frame wcs -tile grid layout '+str(len(cut_images)+1)+' 1 &'
        regname = setupdic['source_catalog'].replace('.fits','.reg')
        if os.path.isfile(regname):
            ds9string = ds9string.replace('xxregionxx',' -region '+regname+' ')
        else:
            ds9string = ds9string.replace('xxregionxx',' ')
        print(ds9string)
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
def gen_fullFoV_from_cutouts(setupfile,store_sourcemodelcube=False,store_modelcube=True,clobber=False,verbose=True):
"""
This routine combines the 3D scaled model cubes obtained from individual cutouts to a
source model cube of the full FoV the cutouts were extracted from so the full FoV IFU
cube can be modified based on the individual cutouts.
--- INPUT ---
setupfile TDOSE setup file used to run tdose.perform_extraction() with model_cutouts=True
store_sourcemodelcube Save the 4D source model cube to a fits file (it's large: ~ size of 3Dcube * Nsources).
Hence, if too little memory is available on the system python will likely crash.
store_modelcube If true a model cube (woudl be the same as summing over the source model cube) will be
stored as a seperate fits file. This only requieres memory enough to handle two cubes
as opposed to Nsources * cube when manipulating the 4D source model cube.
clobber Overwrite existing files
verbose Toggle verbosity
--- EXAMPLE OF USE ---
import tdose
tdose.gen_fullFoV_from_cutouts('/Users/kschmidt/work/TDOSE/tdose_setup_candels-cdfs-02.txt')
"""
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if verbose: print(' - Loading setup file and getting IDs that were extracted (and hence cutout)')
setupdic = tu.load_setup(setupfile,verbose=verbose)
sourcecat_init = setupdic['source_catalog']
sourcedat_init = afits.open(sourcecat_init)[1].data
sourcehdr_init = afits.open(sourcecat_init)[1].header
sourceids_init = sourcedat_init[setupdic['sourcecat_IDcol']]
Nsources = len(sourceids_init)
sourcenumber = np.arange(Nsources)
if type(setupdic['sources_to_extract']) == np.str_ or (type(setupdic['sources_to_extract']) == str):
if setupdic['sources_to_extract'].lower() == 'all':
extractids = sourceids_init.astype(float)
else:
extractids = np.genfromtxt(setupdic['sources_to_extract'],dtype=None,comments='#')
extractids = list(extractids.astype(float))
else:
extractids = setupdic['sources_to_extract']
Nextractions = len(extractids)
if verbose: print(' Will combine models of '+str(Nextractions)+' extracted objects (if models exists) into full FoV cubes ')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if verbose: print(' - Checking that source model cubes exist in models_directory ')
modeldir = setupdic['models_directory']
basename = setupdic['data_cube'].split('/')[-1].split('.fit')[0]
for objid in extractids:
sourcemodelcube = glob.glob(modeldir+basename+'*id'+str(int(objid))+'*cutout*'+
setupdic['source_model_cube_ext']+'_'+setupdic['psf_type']+'.fits')
if len(sourcemodelcube) == 0:
if verbose: print(' WARNING: did not find a source model cube for object '+str(objid))
elif len(sourcemodelcube) > 1:
if verbose: print(' WARNING: found more than one source model cube for object '+str(objid)+\
'\n Using '+sourcemodelcube[0])
if verbose: print(' If no WARNINGs raised, all cubes were found')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if verbose: print(' - Build template full FoV cubes to fill with models')
cube_data = afits.open(setupdic['data_cube'])[setupdic['cube_extension']].data
cube_data_hdr = afits.open(setupdic['data_cube'])[setupdic['cube_extension']].header
striphdr = tu.strip_header(cube_data_hdr.copy())
cubewcs = wcs.WCS(striphdr)
cubewcs_2D = tu.WCS3DtoWCS2D(cubewcs.copy())
cube_shape = cube_data.shape
if store_sourcemodelcube:
smc_out = np.zeros([Nextractions,cube_shape[0],cube_shape[1],cube_shape[2]])
if store_modelcube:
cube_out = np.zeros([cube_shape[0],cube_shape[1],cube_shape[2]])
cube_model = np.zeros([cube_shape[0],cube_shape[1],cube_shape[2]])
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if verbose: print(' - Adding individual models to full FoV output cubes ')
for oo, objid in enumerate(extractids):
cutstr, cutoutsize, cut_img, cut_cube, cut_variance, cut_sourcecat = tu.get_datinfo(objid,setupdic)
sourcemodelcube = glob.glob(modeldir+basename+'*id'+str(int(objid))+'*cutout*'+
setupdic['source_model_cube_ext']+'_'+setupdic['psf_type']+'.fits')
if len(sourcemodelcube) > 0:
subsourcecat_file = setupdic['source_catalog'].replace('.fits',cutstr+'.fits')
if not os.path.isfile(subsourcecat_file):
sys.exit(' ---> did not find the source catalog \n '+subsourcecat_file+
'\n need it to locate model and define region of insertion in full FoV cube. ')
subsourcecat = afits.open(subsourcecat_file)[1].data
objid_modelent = np.where(subsourcecat['id'] == objid)[0][0]
if setupdic['sourcecat_parentIDcol'] is None:
parent_id = None
Nparent = 1
else:
parent_id = subsourcecat[setupdic['sourcecat_parentIDcol']][objid_modelent]
parent_ent = np.where(subsourcecat[setupdic['sourcecat_parentIDcol']] == parent_id)[0]
source_ids = subsourcecat['id'][parent_ent]
Nparent = len(parent_ent)
sourcemodelhdu = afits.open(sourcemodelcube[0])
if Nparent == 1:
infostr = ' > Getting object model for '+str(int(objid))+' (source model no. '+str(int(objid_modelent))+')'+\
' (obj '+str("%.5d" % (oo+1))+' / '+str("%.5d" % (Nextractions))+') '
sourcemodel = sourcemodelhdu[setupdic['cube_extension']].data[objid_modelent,:,:,:]
else:
infostr = ' > Getting object model for '+str(int(parent_id))+'\n (combining source models: '+\
','.join([str(int(id)) for id in source_ids])+', i.e. source model no. '+\
','.join([str(int(ent)) for ent in parent_ent])+')'+\
' (obj '+str("%.5d" % (oo+1))+' / '+str("%.5d" % (Nextractions))+') '
sourcemodel = np.sum(sourcemodelhdu[setupdic['cube_extension']].data[parent_ent,:,:,:],axis=0)
if verbose:
sys.stdout.write("%s\r" % infostr)
sys.stdout.flush()