from __future__ import absolute_import

import collections
+from curses import meta

-from hawc_hal.serialize import Serialization
-from hawc_hal.region_of_interest import get_roi_from_dict
+from threeML.io.logging import setup_logger

+from hawc_hal.region_of_interest import get_roi_from_dict
+from hawc_hal.serialize import Serialization
-from threeML.io.logging import setup_logger

log = setup_logger(__name__)

log.propagate = False

-from ..healpix_handling import SparseHealpix, DenseHealpix
+from ..healpix_handling import DenseHealpix, SparseHealpix

from .data_analysis_bin import DataAnalysisBin


-def from_hdf5_file(map_tree_file, roi):
+def from_hdf5_file(map_tree_file, roi, transits):
    """
    Create a MapTree object from a HDF5 file and a ROI. Do not use this directly, use map_tree_factory instead.
@@ -25,12 +27,12 @@ def from_hdf5_file(map_tree_file, roi):
    # Read the data frames contained in the file
    with Serialization(map_tree_file) as serializer:

-        analysis_bins_df, _ = serializer.retrieve_pandas_object('/analysis_bins')
-        meta_df, _ = serializer.retrieve_pandas_object('/analysis_bins_meta')
-        roimap, roi_meta = serializer.retrieve_pandas_object('/ROI')
-
-        if len(roimap)> 0:
-            roi_meta['roimap'] = roimap.values
+        analysis_bins_df, _ = serializer.retrieve_pandas_object("/analysis_bins")
+        meta_df, _ = serializer.retrieve_pandas_object("/analysis_bins_meta")
+        roimap, roi_meta = serializer.retrieve_pandas_object("/ROI")
+
+        if len(roimap) > 0:
+            roi_meta["roimap"] = roimap.values

    # Let's see if the file contains the definition of an ROI
    if len(roi_meta) > 0:
@@ -47,16 +49,19 @@ def from_hdf5_file(map_tree_file, roi):
            active_pixels_user = roi.active_pixels(1024)

            # This verifies that active_pixels_file is a superset (or equal) to the user-provided set
-            assert set(active_pixels_file) >= set(active_pixels_user), \
-                "The ROI you provided (%s) is not a subset " \
+            assert set(active_pixels_file) >= set(active_pixels_user), (
+                "The ROI you provided (%s) is not a subset "
                "of the one contained in the file (%s)" % (roi, file_roi)
+            )

        else:

            # The user has provided no ROI, but the file contains one. Let's issue a warning
-            log.warning("You did not provide any ROI but the map tree %s contains "
-                        "only data within the ROI %s. "
-                        "Only those will be used." % (map_tree_file, file_roi))
+            log.warning(
+                "You did not provide any ROI but the map tree %s contains "
+                "only data within the ROI %s. "
+                "Only those will be used." % (map_tree_file, file_roi)
+            )

            # Make a copy of the file ROI and use it as if the user provided that one
            roi = get_roi_from_dict(file_roi.to_dict())
@@ -69,6 +74,12 @@ def from_hdf5_file(map_tree_file, roi):

    data_analysis_bins = collections.OrderedDict()

+    n_transits = meta_df["n_transits"].max() if transits is None else transits
+
+    # assert (
+    #     n_transits <= meta_df["n_transits"].max()
+    # ), "Cannot use higher value than that of maptree"
+
    for bin_name in bin_names:

        this_df = analysis_bins_df.loc[bin_name]
@@ -77,31 +88,48 @@ def from_hdf5_file(map_tree_file, roi):
        if roi is not None:

            # Get the active pixels for this plane
-            active_pixels_user = roi.active_pixels(int(this_meta['nside']))
+            active_pixels_user = roi.active_pixels(int(this_meta["nside"]))

            # Read only the pixels that the user wants
-            observation_hpx_map = SparseHealpix(this_df.loc[active_pixels_user, 'observation'].values,
-                                                active_pixels_user, this_meta['nside'])
-            background_hpx_map = SparseHealpix(this_df.loc[active_pixels_user, 'background'].values,
-                                               active_pixels_user, this_meta['nside'])
+            observation_hpx_map = SparseHealpix(
+                this_df.loc[active_pixels_user, "observation"].values
+                * (n_transits / meta_df["n_transits"].max()),
+                active_pixels_user,
+                this_meta["nside"],
+            )
+            background_hpx_map = SparseHealpix(
+                this_df.loc[active_pixels_user, "background"].values
+                * (n_transits / meta_df["n_transits"].max()),
+                active_pixels_user,
+                this_meta["nside"],
+            )

        else:

            # Full sky
-            observation_hpx_map = DenseHealpix(this_df.loc[:, 'observation'].values)
-            background_hpx_map = DenseHealpix(this_df.loc[:, 'background'].values)
+            observation_hpx_map = DenseHealpix(
+                this_df.loc[:, "observation"].values
+                * (n_transits / meta_df["n_transits"].max())
+            )
+            background_hpx_map = DenseHealpix(
+                this_df.loc[:, "background"].values
+                * (n_transits / meta_df["n_transits"].max())
+            )

            # This signals the DataAnalysisBin that we are dealing with a full sky map
            active_pixels_user = None

        # Let's now build the instance
-        this_bin = DataAnalysisBin(bin_name,
-                                   observation_hpx_map=observation_hpx_map,
-                                   background_hpx_map=background_hpx_map,
-                                   active_pixels_ids=active_pixels_user,
-                                   n_transits=this_meta['n_transits'],
-                                   scheme='RING' if this_meta['scheme'] == 0 else 'NEST')
+        this_bin = DataAnalysisBin(
+            bin_name,
+            observation_hpx_map=observation_hpx_map,
+            background_hpx_map=background_hpx_map,
+            active_pixels_ids=active_pixels_user,
+            # n_transits=this_meta["n_transits"],
+            n_transits=n_transits,
+            scheme="RING" if this_meta["scheme"] == 0 else "NEST",
+        )

        data_analysis_bins[bin_name] = this_bin

-    return data_analysis_bins
+    return data_analysis_bins, n_transits
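In short, the new transits argument turns map reading into a linear rescaling: every observation and background column is multiplied by n_transits / meta_df["n_transits"].max(), so transits=None reproduces the maps exactly as stored, while, for example, requesting 100 transits from a map tree built with 500 scales all maps by 0.2. The helper also now returns the number of transits actually used alongside the bin dictionary. The lines below are a minimal, hypothetical usage sketch; the file name, ROI value, and import path are placeholders, and per the docstring map_tree_factory remains the intended public entry point rather than calling from_hdf5_file directly.

# Illustrative sketch only: the module path and "maptree.hd5" are assumed placeholders.
from hawc_hal.maptree.from_hdf5_file import from_hdf5_file

# transits=None keeps the number of transits stored in the map tree (no rescaling).
bins_full, n_full = from_hdf5_file("maptree.hd5", roi=None, transits=None)

# An explicit value rescales every observation/background map by 100.0 / n_full.
bins_scaled, n_scaled = from_hdf5_file("maptree.hd5", roi=None, transits=100.0)
assert n_scaled == 100.0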