Skip to content

Commit

Permalink
New skymap handling (#61)
Browse files Browse the repository at this point in the history
* skip check for events if before 2020

* load rates in sections if ext fra longer than 45 days

* correct ex time

* deal with timezones, if needed

* minor typos

* bump version to 4.1.0

* use start time not event time to find results file

* correct link to wp posted to slack

* Move old skymap stuff to new class and add bool to clarify type of skymap

* if option given print nearby events to screen

* new option for deltallh map, default prob map

* new option to print nearby events if desired

* minor version increment for new release

* increase default no of trials run

* change name of arg to make it clear this is for the circular num, not notice
  • Loading branch information
jessiethw authored Oct 9, 2024
1 parent 065d1c0 commit 23c8753
Show file tree
Hide file tree
Showing 12 changed files with 67 additions and 35 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ In order to run this analysis, you should find the alert event time (sent in the
Once you have these details, you can run the analysis with

```console
python run_track_followup.py --skymap=/home/followup/output_plots/run{RUNID}.evt{EVENTID}.HESE.skymap_nside_512.fits.gz --time={ALERT_MJD} --gcn_notice_num={GCN_CIRCULAR_NUMBER} --alert_id={RUNID}:{EVENTID}
python run_track_followup.py --skymap=/home/followup/output_plots/run{RUNID}.evt{EVENTID}.neutrino_8_64_512_1024.skymap_nside_1024.fits.gz --time={ALERT_MJD} --alert_circ={GCN_CIRCULAR_NUMBER} --alert_id={RUNID}:{EVENTID}
```

This will run two analyses, one with a time window of 1000 seconds and one with a time window of 2 days, both centered on the alert time. It will also remove the alert event from the sample, and it will assume a spectral index of -2.5, which is different than the -2 used for the `run_external_followup.py` script.
Expand All @@ -141,7 +141,7 @@ To run the analysis to follow up a gravitational wave event, you will need to nav

As an example, there is a sample map included here: `fast_response/sample_skymaps/S191216ap_update.xml`. To run this event, the input would look like:
```console
python run_gw_followup.py --name="S191216ap Update" --time=58833.893 --skymap="https://gracedb.ligo.org/api/superevents/S191216ap/files/LALInference.fits.gz,0"
python run_gw_followup.py --name="S191216ap Update" --time=58833.89836 --skymap="https://gracedb.ligo.org/api/superevents/S191216ap/files/LALInference.fits.gz,0"
```
Additionally, you can choose to pass the argument `--allow_neg_ts` (bool) if you want to use the convention where a negative TS is allowed. The default is to use the convention TS>=0.

Expand Down
2 changes: 1 addition & 1 deletion doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
project = 'FastResponseAnalysis'
copyright = '2023, Alex Pizzuto, Jessie Thwaites'
author = 'Alex Pizzuto, Jessie Thwaites'
release = '1.2.2'
release = '1.3.0'

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
Expand Down
22 changes: 20 additions & 2 deletions fast_response/AlertFollowup.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ class AlertFollowup(PriorFollowup):
_fix_index = True
_float_index = not _fix_index
_index = 2.5
_llh_map = False

def run_background_trials(self, ntrials = 1000):
r"""For alert events with specific time windows,
Expand Down Expand Up @@ -257,7 +258,22 @@ class TrackFollowup(AlertFollowup):
"""
Class for followup of track alert events
By default, uses a fixed index of 2.5 in LLH.
By default, converts millipede LLH map to a PDF.
Assumes a probability skymap for the follow-up.
Built on AlertFollowup and PriorFollowup base classes
"""
_smear = False
_dataset = "GFUOnline_v001p02"
_fix_index = True
_float_index = not _fix_index
_index = 2.5
_llh_map = False

class TrackFollowupLLH(AlertFollowup):
"""
[Old] Class for followup of track alert events
By default, uses a fixed index of 2.5 in LLH
and converts millipede LLH map to a PDF.
Built on AlertFollowup and PriorFollowup base classes
"""
Expand All @@ -266,6 +282,7 @@ class TrackFollowup(AlertFollowup):
_fix_index = True
_float_index = not _fix_index
_index = 2.5
_llh_map = True

def format_skymap(self, skymap):
"""
Expand Down Expand Up @@ -370,4 +387,5 @@ class CascadeFollowup(AlertFollowup):
_dataset = "GFUOnline_v001p02"
_fix_index = True
_float_index = not _fix_index
_index = 2.5
_index = 2.5
_llh_map = False
28 changes: 16 additions & 12 deletions fast_response/FastResponseAnalysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ def __init__(self, name, tstart, tstop, skipped=None, seed=None,
outdir=None, save=True, extension=None):
self.name = name

if seed is not None:
self.llh_seed(seed)
# if seed is not None:
# self.llh_seed(seed)
if outdir is None:
outdir = os.environ.get('FAST_RESPONSE_OUTPUT')
if outdir is None:
Expand Down Expand Up @@ -581,8 +581,9 @@ def plot_skymap_zoom(self, with_contour=False, contour_files=None):
cont = np.loadtxt(c_file, skiprows=1)
cont_ra = cont.T[0]
cont_dec = cont.T[1]
label = 'Millipede 50\%, 90\% (160427A syst.)' \
if contour_counter == 0 else ''
cont_label = 'Millipede 50\%, 90\% (160427A syst.)' if self._llh_map else 'Skymap 50\%, 90\%'
label = cont_label if contour_counter == 0 else ''

hp.projplot(np.pi/2. - cont_dec, cont_ra, linewidth=3.,
color='k', linestyle=cont_ls[contour_counter], coord='C',
label=label)
Expand Down Expand Up @@ -872,7 +873,7 @@ def run_background_trials(self, ntrials=1000):
self.tsd = tsd
self.save_items['tsd'] = tsd

def find_coincident_events(self):
def find_coincident_events(self, print_events=False):
r"""Find coincident events for a skymap
based analysis. These are ontime events that are also in the
90% contour of the skymap
Expand All @@ -885,13 +886,16 @@ def find_coincident_events(self):
overlap = np.isin(exp_pix, self.ipix_90)
events = events[overlap]

# print nearby events, as a check (if needed)
# msk1 = (self.llh.exp[t_mask]['ra'] < (self.skymap_fit_ra+np.radians(5)))*(self.llh.exp[t_mask]['ra'] > (self.skymap_fit_ra-np.radians(5)))
# msk2 = (self.llh.exp[t_mask]['dec'] < (self.skymap_fit_dec+np.radians(5)))*((self.llh.exp[t_mask]['dec'] > self.skymap_fit_dec-np.radians(5)))
# msk3 = msk1*msk2
# print('Nearby events:')
# print("[run, event, ra, dec, sigma, logE, time]")
# for e in self.llh.exp[t_mask][msk3]: print([e[k] for k in ['run', 'event', 'ra', 'dec', 'sigma', 'logE', 'time']])
if print_events:
# print nearby events, as a check (if needed)
msk1 = (self.llh.exp[t_mask]['ra'] < (self.skymap_fit_ra+np.radians(5)))*(self.llh.exp[t_mask]['ra'] > (self.skymap_fit_ra-np.radians(5)))
msk2 = (self.llh.exp[t_mask]['dec'] < (self.skymap_fit_dec+np.radians(5)))*((self.llh.exp[t_mask]['dec'] > self.skymap_fit_dec-np.radians(5)))
msk3 = msk1*msk2
if np.count_nonzero(msk3) > 0:
print('Events within 5 deg of best-fit:')
print("[run, event, ra, dec, sigma, logE, time]")
for e in self.llh.exp[t_mask][msk3]:
print([e[k] for k in ['run', 'event', 'ra', 'dec', 'sigma', 'logE', 'time']])

if len(events) == 0:
coincident_events = []
Expand Down
7 changes: 5 additions & 2 deletions fast_response/listeners/gw_gcn_listener.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,14 +177,17 @@ def process_gcn(payload, root):
#'--allow_neg_ts=True']
)

output = os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),eventtime[0:10].replace('-','_')+'_'+name)
analysis_start = Time(event_mjd - 500./86400., format='mjd').iso
output = os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),
analysis_start[0:10].replace('-','_')+'_'+name)
#update webpages
webpage_update = os.path.join(analysis_path,'document.py')
if not mock and root.attrib['role'] == 'observation':
try:
subprocess.call([webpage_update, '--gw', f'--path={output}'])

wp_link = 'https://user-web.icecube.wisc.edu/~jthwaites/FastResponse/gw-webpage/output/{}.html'.format(eventtime[0:10].replace('-','_')+'_'+name)
wp_link = 'https://user-web.icecube.wisc.edu/~jthwaites/FastResponse/gw-webpage/output/'+\
'{}.html'.format(analysis_start[0:10].replace('-','_')+'_'+name)
slack_message = "UML GW analysis finished running for event {}: <{}|link>.".format(name, wp_link)
with open('../slack_posters/internal_alert_slackbot.txt') as f:
chan = f.readline().rstrip('\n')
Expand Down
1 change: 0 additions & 1 deletion fast_response/make_ontime_plots.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,6 @@ def make_rate_plots(time_window, run_table, query_events, dirname, season='neutr
format='mjd').iso #first 35 days
rates1 = icecube.realtime_tools.live.get_rates(run_table[0]['start'], midpt)
rates2 = icecube.realtime_tools.live.get_rates(midpt, run_table[-1]['stop'])
#avoid double counting last entry
rates = np.concatenate([rates1, rates2])
else:
rates = icecube.realtime_tools.live.get_rates(run_table[0]['start'], run_table[-1]['stop'])
Expand Down
2 changes: 2 additions & 0 deletions fast_response/reports/ReportGenerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ def ontime_table(query_dict):
newevent = event['value']['data']
for key,val in list(newevent['reco']['splinempe'].items()):
newevent['splinempe_'+key]=val
if '+' in newevent['eventtime']:
newevent['eventtime'] = newevent['eventtime'].split('+')[0]
if Time(newevent['eventtime'],scale='utc',format='iso') >= Time("2018-07-10 17:52:03.34", format='iso',scale='utc'):
newevent['muex'] = newevent['reco']['energy']['mpe_muex']
del newevent['reco']
Expand Down
5 changes: 3 additions & 2 deletions fast_response/scripts/combine_results_kafka.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
return

collected_results = {}
collected_results["$schema"]= "https://gcn.nasa.gov/schema/v4.0.0/gcn/notices/icecube/lvk_nu_track_search.schema.json"
collected_results["$schema"]= "https://gcn.nasa.gov/schema/v4.1.0/gcn/notices/icecube/lvk_nu_track_search.schema.json"
collected_results["type"]= "IceCube LVK Alert Nu Track Search"

eventtime = record.find('.//ISOTime').text
Expand Down Expand Up @@ -235,7 +235,8 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
)

while results_done == False:
start_date = Time(dateutil.parser.parse(eventtime)).datetime
start_time = Time(Time(eventtime, format='isot').mjd - 500./86400., format='mjd').isot
start_date = Time(dateutil.parser.parse(start_time)).datetime
start_str = f'{start_date.year:02d}_{start_date.month:02d}_{start_date.day:02d}'

uml_results_path = os.path.join(fra_results_location, start_str + '_' + name.replace(' ', '_') \
Expand Down
4 changes: 2 additions & 2 deletions fast_response/scripts/run_external_followup.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,9 @@ def run_analysis(args):
type= lambda z:[ tuple(int(y) for y in x.split(':')) for x in z.split(',')],
help="Event to exclude from the analyses, eg."
"Example --skip-events=127853:67093193")
parser.add_argument('--ntrials', default=100, type=int,
parser.add_argument('--ntrials', default=200, type=int,
help="Number of background trials to perform")
parser.add_argument('--n_per_sig', default=100, type=int,
parser.add_argument('--n_per_sig', default=200, type=int,
help="Number of signal trials per injected ns to perform")
parser.add_argument('--document', default=False, action='store_true')
args = parser.parse_args()
Expand Down
23 changes: 14 additions & 9 deletions fast_response/scripts/run_track_followup.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,15 @@
import os, argparse, glob
from astropy.time import Time

from fast_response.AlertFollowup import TrackFollowup
from fast_response.AlertFollowup import TrackFollowup, TrackFollowupLLH
import fast_response.web_utils as web_utils

parser = argparse.ArgumentParser(description='Fast Response Analysis')
parser.add_argument('--skymap', type=str, default=None,
help='path to skymap')
parser.add_argument('--time', type=float, default=None,
help='Time of the alert event (mjd)')
parser.add_argument('--gcn_notice_num', default=0, type=int,
parser.add_argument('--alert_circ', default=0, type=int,
help="GCN Circular NUMBER for updated circular (not Bacodine/Notice)")
parser.add_argument('--alert_id', default=None,
type= lambda z:[ tuple(int(y) for y in x.split(':')) for x in z.split(',')],
Expand All @@ -27,6 +27,10 @@
parser.add_argument('--suffix', type=str, default='A',
help="letter to differentiate multiple alerts on the same day (default = A)."
"Event name given by IceCube-yymmdd + suffix.")
parser.add_argument('--print_nearby', action='store_true', default=False,
help='Option to print events within 5 degrees of best-fit to the screen (default False)')
parser.add_argument('--deltallh_skymap', action='store_true', default=False,
help='Option MUST be raised if using deltaLLH map (default: False, assumes probability map)')
args = parser.parse_args()

track_time = Time(args.time, format='mjd')
Expand All @@ -53,27 +57,28 @@
for containment in ['50', '90']:
contour_fs = contour_fs + glob.glob(base_skymap_path +
f'run{run_id:08d}.evt{ev_id:012d}.*.contour_{containment}.txt')
# contour_fs = [base_skymap_path \
# + f'run{run_id:08d}.evt{ev_id:012d}.HESE.contour_{containment}.txt' \
# for containment in ['50', '90']]
# contour_fs = [f for f in contour_fs if os.path.exists(f)]
if len(contour_fs) == 0:
contour_fs = None

f = TrackFollowup(name, args.skymap, start, stop, skipped=args.alert_id)
if args.deltallh_skymap:
print('Initializing Track follow up with deltaLLH map')
f = TrackFollowupLLH(name, args.skymap, start, stop, skipped=args.alert_id)
else:
print('Initializing Track follow up with probability skymap')
f = TrackFollowup(name, args.skymap, start, stop, skipped=args.alert_id)

f.unblind_TS()
f.plot_ontime(contour_files=contour_fs)
f.calc_pvalue()
f.make_dNdE()
f.plot_tsd()
f.upper_limit()
f.find_coincident_events()
f.find_coincident_events(print_events=args.print_nearby)
results = f.save_results()
f.generate_report()
all_results[delta_t] = results

all_results[1000.]['gcn_num'] = args.gcn_notice_num
all_results[1000.]['gcn_num'] = args.alert_circ

# Write circular to the output directory of the 2 day analysis
f.write_circular(all_results)
Expand Down
2 changes: 1 addition & 1 deletion fast_response/web_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ def updateDataFrame(analysis, gw=False, make_df=False):
num = np.count_nonzero(df.index == analysis['name'])
analysis['name'] += '_{}'.format(num)
df.loc[analysis['name']] = new_list

if gw:
df.to_pickle(f'{base_path}/results_dataframe_gw.pkl')
else:
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import setuptools

long_message = 'Fast Response Analysis'
version = "1.2.2"
version = "1.3.0"

setuptools.setup(
name="fast_response",
Expand Down

0 comments on commit 23c8753

Please sign in to comment.