Skip to content

Commit 75dafd1

Browse files
authored
Merge pull request #84 from torresramiro350/master
Set number of transits to HAL
2 parents c671ce0 + bf3fad3 commit 75dafd1

File tree

9 files changed

+348
-753
lines changed

9 files changed

+348
-753
lines changed

hawc_hal/HAL.py

+65-116
Large diffs are not rendered by default.

hawc_hal/maptree/from_hdf5_file.py

+58-30
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,21 @@
11
from __future__ import absolute_import
2+
23
import collections
4+
from curses import meta
35

4-
from hawc_hal.serialize import Serialization
5-
from hawc_hal.region_of_interest import get_roi_from_dict
6+
from threeML.io.logging import setup_logger
67

8+
from hawc_hal.region_of_interest import get_roi_from_dict
9+
from hawc_hal.serialize import Serialization
710

8-
from threeML.io.logging import setup_logger
911
log = setup_logger(__name__)
1012
log.propagate = False
1113

12-
from ..healpix_handling import SparseHealpix, DenseHealpix
14+
from ..healpix_handling import DenseHealpix, SparseHealpix
1315
from .data_analysis_bin import DataAnalysisBin
1416

1517

16-
def from_hdf5_file(map_tree_file, roi):
18+
def from_hdf5_file(map_tree_file, roi, transits):
1719
"""
1820
Create a MapTree object from a HDF5 file and a ROI. Do not use this directly, use map_tree_factory instead.
1921
@@ -25,12 +27,12 @@ def from_hdf5_file(map_tree_file, roi):
2527
# Read the data frames contained in the file
2628
with Serialization(map_tree_file) as serializer:
2729

28-
analysis_bins_df, _ = serializer.retrieve_pandas_object('/analysis_bins')
29-
meta_df, _ = serializer.retrieve_pandas_object('/analysis_bins_meta')
30-
roimap, roi_meta = serializer.retrieve_pandas_object('/ROI')
31-
32-
if len(roimap)>0:
33-
roi_meta['roimap']=roimap.values
30+
analysis_bins_df, _ = serializer.retrieve_pandas_object("/analysis_bins")
31+
meta_df, _ = serializer.retrieve_pandas_object("/analysis_bins_meta")
32+
roimap, roi_meta = serializer.retrieve_pandas_object("/ROI")
33+
34+
if len(roimap) > 0:
35+
roi_meta["roimap"] = roimap.values
3436

3537
# Let's see if the file contains the definition of an ROI
3638
if len(roi_meta) > 0:
@@ -47,16 +49,19 @@ def from_hdf5_file(map_tree_file, roi):
4749
active_pixels_user = roi.active_pixels(1024)
4850

4951
# This verifies that active_pixels_file is a superset (or equal) to the user-provided set
50-
assert set(active_pixels_file) >= set(active_pixels_user), \
51-
"The ROI you provided (%s) is not a subset " \
52+
assert set(active_pixels_file) >= set(active_pixels_user), (
53+
"The ROI you provided (%s) is not a subset "
5254
"of the one contained in the file (%s)" % (roi, file_roi)
55+
)
5356

5457
else:
5558

5659
# The user has provided no ROI, but the file contains one. Let's issue a warning
57-
log.warning("You did not provide any ROI but the map tree %s contains "
58-
"only data within the ROI %s. "
59-
"Only those will be used." % (map_tree_file, file_roi))
60+
log.warning(
61+
"You did not provide any ROI but the map tree %s contains "
62+
"only data within the ROI %s. "
63+
"Only those will be used." % (map_tree_file, file_roi)
64+
)
6065

6166
# Make a copy of the file ROI and use it as if the user provided that one
6267
roi = get_roi_from_dict(file_roi.to_dict())
@@ -69,6 +74,12 @@ def from_hdf5_file(map_tree_file, roi):
6974

7075
data_analysis_bins = collections.OrderedDict()
7176

77+
n_transits = meta_df["n_transits"].max() if transits is None else transits
78+
79+
# assert (
80+
# n_transits <= meta_df["transits"].max()
81+
# ), "Cannot use higher value than that of maptree"
82+
7283
for bin_name in bin_names:
7384

7485
this_df = analysis_bins_df.loc[bin_name]
@@ -77,31 +88,48 @@ def from_hdf5_file(map_tree_file, roi):
7788
if roi is not None:
7889

7990
# Get the active pixels for this plane
80-
active_pixels_user = roi.active_pixels(int(this_meta['nside']))
91+
active_pixels_user = roi.active_pixels(int(this_meta["nside"]))
8192

8293
# Read only the pixels that the user wants
83-
observation_hpx_map = SparseHealpix(this_df.loc[active_pixels_user, 'observation'].values,
84-
active_pixels_user, this_meta['nside'])
85-
background_hpx_map = SparseHealpix(this_df.loc[active_pixels_user, 'background'].values,
86-
active_pixels_user, this_meta['nside'])
94+
observation_hpx_map = SparseHealpix(
95+
this_df.loc[active_pixels_user, "observation"].values
96+
* (n_transits / meta_df["n_transits"].max()),
97+
active_pixels_user,
98+
this_meta["nside"],
99+
)
100+
background_hpx_map = SparseHealpix(
101+
this_df.loc[active_pixels_user, "background"].values
102+
* (n_transits / meta_df["n_transits"].max()),
103+
active_pixels_user,
104+
this_meta["nside"],
105+
)
87106

88107
else:
89108

90109
# Full sky
91-
observation_hpx_map = DenseHealpix(this_df.loc[:, 'observation'].values)
92-
background_hpx_map = DenseHealpix(this_df.loc[:, 'background'].values)
110+
observation_hpx_map = DenseHealpix(
111+
this_df.loc[:, "observation"].values
112+
* (n_transits / meta_df["n_transits"].max())
113+
)
114+
background_hpx_map = DenseHealpix(
115+
this_df.loc[:, "background"].values
116+
* (n_transits / meta_df["n_transits"].max())
117+
)
93118

94119
# This signals the DataAnalysisBin that we are dealing with a full sky map
95120
active_pixels_user = None
96121

97122
# Let's now build the instance
98-
this_bin = DataAnalysisBin(bin_name,
99-
observation_hpx_map=observation_hpx_map,
100-
background_hpx_map=background_hpx_map,
101-
active_pixels_ids=active_pixels_user,
102-
n_transits=this_meta['n_transits'],
103-
scheme='RING' if this_meta['scheme'] == 0 else 'NEST')
123+
this_bin = DataAnalysisBin(
124+
bin_name,
125+
observation_hpx_map=observation_hpx_map,
126+
background_hpx_map=background_hpx_map,
127+
active_pixels_ids=active_pixels_user,
128+
# n_transits=this_meta["n_transits"],
129+
n_transits=n_transits,
130+
scheme="RING" if this_meta["scheme"] == 0 else "NEST",
131+
)
104132

105133
data_analysis_bins[bin_name] = this_bin
106134

107-
return data_analysis_bins
135+
return data_analysis_bins, n_transits

0 commit comments

Comments (0)