diff --git a/fast_response/AlertFollowup.py b/fast_response/AlertFollowup.py
index 7e81c61..11a59d2 100644
--- a/fast_response/AlertFollowup.py
+++ b/fast_response/AlertFollowup.py
@@ -14,6 +14,14 @@ from . import sensitivity_utils

 class AlertFollowup(PriorFollowup):
+    '''
+    Class for alert event followup.
+    By default, uses a fixed index of 2.5 in LLH.
+
+    See also:
+    ----------
+    PriorFollowup: class for skymap-based analyses
+    '''
     _smear = False
     _dataset = "GFUOnline_v001p02"
     _fix_index = True
@@ -26,9 +34,9 @@ def run_background_trials(self, ntrials = 1000):

         Returns:
         --------
-        tsd: array-like
-            test-statistic distribution with weighting
-            from alert event spatial prior
+        tsd: array-like
+            test-statistic distribution with weighting
+            from alert event spatial prior
         '''
         current_rate = self.llh.nbackground / (self.duration * 86400.) * 1000.
         closest_rate = sensitivity_utils.find_nearest(np.linspace(6.2, 7.2, 6), current_rate)
@@ -59,8 +67,16 @@ def upper_limit(self):
         self.sens_range_plot()

     def ps_sens_range(self):
-        r'''Compute the minimum and maximum sensitivities
-        within the 90% contour of the skymap'''
+        r'''
+        Compute the minimum and maximum sensitivities
+        within the 90% contour of the skymap
+
+        Returns:
+        ----------
+        float, float:
+            lowest sensitivity within the 90% contour of the skymap
+            highest sensitivity within the 90% contour of the skymap
+        '''

         sens_dir = '/data/ana/analyses/NuSources/2021_v2_alert_stacking_FRA/' \
             + 'fast_response/reference_sensitivity_curves/'
@@ -77,8 +93,10 @@ def ps_sens_range(self):
         return low, high

     def sens_range_plot(self):
-        r'''For alert events, make a sensitivity plot highlighting
-        the region where the contour lives'''
+        r'''
+        For alert events, make a sensitivity plot highlighting
+        the region where the contour lives
+        '''
         fig, ax = plt.subplots()

         sens_dir = '/data/ana/analyses/NuSources/2021_v2_alert_stacking_FRA/' \
@@ -108,13 +126,28 @@ def sens_range_plot(self):
         plt.savefig(self.analysispath + '/upper_limit_distribution.png', bbox_inches='tight', dpi=200)

     def generate_report(self):
-        r'''Generates report using class attributes
-        and the ReportGenerator Class'''
+        r'''
+        Generates report using class attributes
+        and the ReportGenerator Class
+
+        See Also:
+        ----------
+        ReportGenerator.AlertReport
+        '''
         report = AlertReport(self)
         report.generate_report()
         report.make_pdf()

     def write_circular(self, alert_results):
+        '''
+        Generate a circular from internal followup template.
+        Saves a text file in the output directory as alertid_circular.txt
+
+        Parameters:
+        ------------
+        alert_results: dict
+            Dictionary of results from the two time window analyses
+        '''
         base = os.path.dirname(fast_response.__file__)
         template_path = os.path.join(base, 'circular_templates/internal_followup.txt')
@@ -222,6 +255,16 @@ def write_circular(self, alert_results):

 class TrackFollowup(AlertFollowup):
+    '''
+    Class for followup of track alert events.
+    By default, uses a fixed index of 2.5 in LLH.
+    By default, converts millipede LLH map to a PDF.
+
+    See also:
+    ----------
+    AlertFollowup: class for Alert event followup
+    PriorFollowup: class for skymap-based analyses
+    '''
     _smear = True
     _dataset = "GFUOnline_v001p02"
     _fix_index = True
@@ -229,12 +272,35 @@ class TrackFollowup(AlertFollowup):
     _index = 2.5

     def format_skymap(self, skymap):
+        '''
+        Format millipede LLH map to a PDF Healpy skymap
+
+        Parameters:
+        -----------
+        skymap: Healpy array
+            Array of millipede LLH values, read in via Healpy
+
+        Returns:
+        -----------
+        Healpy array: formatted skymap of probabilities
+        '''
         skymap = self.convert_llh_to_prob(skymap)
         skymap = super().format_skymap(skymap)
         return skymap

     def ipixs_in_percentage(self, percentage):
-        """Finding ipix indices confined in a given percentage.
+        """
+        Finding ipix indices confined in a given percentage.
+
+        Parameters:
+        -----------
+        percentage: float
+            percent containment to use.
+            Allowed values for alert events are 0.5 and 0.9 (50% and 90% containment)
+
+        Returns:
+        -----------
+        int array: array of indices within the given percentage.
         """
         skymap = self.skymap_llh
         if hp.pixelfunc.get_nside(skymap) != self._nside:
@@ -256,6 +322,15 @@ def convert_llh_to_prob(self, skymap_fits):
         This takes a millipede map and converts from the likelihood
         values to a pdf assuming the order-preserving mapping
         we use to account for systematics
+
+        Parameters:
+        -----------
+        skymap_fits: Healpy array
+            Millipede LLH values read from scanner output
+
+        Returns:
+        ----------
+        Healpy array of scaled probability values
         '''
         skymap_llh = skymap_fits.copy()
         self.skymap_llh = skymap_llh
@@ -286,6 +361,15 @@ def _scale_2d_gauss(self, arr, sigma_arr, new_sigma):
         return tmp / np.sum(tmp)

 class CascadeFollowup(AlertFollowup):
+    '''
+    Class for followup of cascade alert events.
+    By default, uses a fixed index of 2.5 in LLH.
+
+    See also:
+    ----------
+    AlertFollowup: class for Alert event followup
+    PriorFollowup: class for skymap-based analyses
+    '''
     _smear = False
     _dataset = "GFUOnline_v001p02"
     _fix_index = True
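For reference, the rate-matching step documented in run_background_trials above can be exercised on its own. A minimal standalone sketch with a made-up event count (only numpy and the package's sensitivity_utils module are assumed):

import numpy as np
from fast_response import sensitivity_utils

# Hypothetical inputs: in the class these are self.llh.nbackground and self.duration
nbackground = 1100.0   # assumed number of background events in the on-time window
duration_days = 2.0    # assumed window length in days

# Convert to a rate in mHz, exactly as in AlertFollowup.run_background_trials
current_rate = nbackground / (duration_days * 86400.) * 1000.

# Pick the nearest of the precomputed background rates (6.2-7.2 mHz in 6 steps)
closest_rate = sensitivity_utils.find_nearest(np.linspace(6.2, 7.2, 6), current_rate)
print('Using precomputed trials for rate {:.1f} mHz'.format(closest_rate))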
diff --git a/fast_response/ExternalFollowup.py b/fast_response/ExternalFollowup.py
index 1a126de..fe8cd0d 100644
--- a/fast_response/ExternalFollowup.py
+++ b/fast_response/ExternalFollowup.py
@@ -1,12 +1,28 @@
 from .FastResponseAnalysis import PointSourceFollowup, PriorFollowup

 class ExternalFollowup(PointSourceFollowup):
+    '''
+    Class for external point-source or extended-source followup.
+    By default, uses a fixed index of 2.0 in LLH.
+
+    See also:
+    ----------
+    PointSourceFollowup: class for a general point source followup
+    '''
     _dataset = "GFUOnline_v001p02"
     _fix_index = True
     _float_index = not _fix_index
     _index = 2.0

 class ExternalSkymapFollowup(PriorFollowup):
+    '''
+    Class for external skymap followup.
+    By default, uses a fixed index of 2.0 in LLH.
+
+    See also:
+    ----------
+    PriorFollowup: class for skymap-based analyses
+    '''
     _dataset = "GFUOnline_v001p02"
     _fix_index = True
     _float_index = not _fix_index
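A typical followup with the class above strings together methods documented elsewhere in this patch. The constructor signature is not part of this diff, so the ra/dec keywords below are assumptions; a sketch:

from fast_response.ExternalFollowup import ExternalFollowup

# name/tstart/tstop follow FastResponseAnalysis(name, tstart, tstop, ...);
# passing the source position as ra/dec keywords is an assumption here
f = ExternalFollowup('TXS 0506+056 Followup',
                     '2022-01-01 00:00:00', '2022-01-02 00:00:00',
                     ra=77.36, dec=5.69)

ts, ns = f.unblind_TS()          # fixed-index (2.0) fit at the source location
p = f.calc_pvalue(ntrials=1000)  # compare TS against background trials
f.upper_limit()                  # E^-2 upper limit via the point-source injector
f.generate_report()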
diff --git a/fast_response/FastResponseAnalysis.py b/fast_response/FastResponseAnalysis.py
index ce91d27..c6cbfcf 100644
--- a/fast_response/FastResponseAnalysis.py
+++ b/fast_response/FastResponseAnalysis.py
@@ -40,10 +40,11 @@ warnings.simplefilter("ignore", RuntimeWarning)

 class FastResponseAnalysis(object):
-    r''' Object to do realtime followup analyses of
-    astrophysical transients with arbitrary event
-    localization
-    '''
+    """
+    Object to do realtime followup analyses of
+    astrophysical transients with arbitrary event
+    localization
+    """
     _dataset = None
     _fix_index = True
     _float_index = not _fix_index
@@ -120,6 +121,7 @@ def __init__(self, name, tstart, tstop, skipped=None, seed=None,
     @property
     def dataset(self):
+        """Returns the dataset used"""
         return self._dataset
     @dataset.setter
     def dataset(self, x):
@@ -127,6 +129,7 @@
     @property
     def index(self):
+        """Returns the spectral index"""
         return self._index
     @index.setter
     def index(self, x):
@@ -134,6 +137,7 @@
     @property
     def llh_seed(self):
+        """Returns the seed used in the LLH"""
        return self._llh_seed
     @llh_seed.setter
     def llh_seed(self, x):
@@ -142,9 +146,13 @@ def get_data(self, livestream_start=None, livestream_stop=None):
         '''
         Gets the skylab data and MC from querying the i3live livestream
-        arguments:
-        livestream_start and livestream_stop - when to start and stop grabbing data
-        (needed due to low latency in GW followups)
+
+        Parameters:
+        -----------
+        livestream_start: float
+            (optional) start time for getting data (MJD) from i3Live. Default is start time of the analysis - 5 days
+        livestream_stop: float
+            (optional) stop time for getting data (MJD). Default is stop time of the analysis
         '''
         if self._verbose:
             print("Grabbing data")
@@ -206,6 +214,18 @@ def initialize_llh(self, skipped=None, scramble=False):
         '''
         Grab data and format it all into a skylab llh object
+
+        Parameters:
+        -----------
+        skipped: array of tuples
+            (optional) event(s) to be removed in the analysis.
+            Format: [(run_id,event_id),...]
+        scramble: bool
+            (optional) run on scrambled data (default=False)
+
+        Returns:
+        ----------
+        llh: skylab llh object
         '''
         if self.exp is None:
             self.get_data()
@@ -264,6 +284,21 @@ def remove_event(self, exp, dset, skipped):
         '''
         Remove a given event from the analysis, eg. for an alert event
+
+        Parameters:
+        -----------
+        exp: skylab data object
+            Data events loaded from livestream
+        dset: skylab Dataset object
+            Dataset used in the analysis
+        skipped: array of tuples
+            Event(s) to attempt to remove from the analysis.
+            Format: [(run_id,event_id),...]
+
+        Returns:
+        ----------
+        exp: skylab data object
+            Data events, minus the removed event (if successful)
         '''
         try:
             event = skipped[0]
@@ -292,7 +327,11 @@ def calc_pvalue(self, ntrials=1000, run_anyway=False):

         Parameters:
         -----------
-        None
+        ntrials: int
+            Number of trials to run (default 1000)
+        run_anyway: bool
+            Choose to override and run background trials, even if TS=0 (default False)
+
         Returns:
         --------
         p: float
@@ -343,8 +382,14 @@ def make_all_report_tables(self):
         pass

     def plot_tsd(self, allow_neg=False):
-        r'''Outputs a plot of the background TS distributions
+        r'''
+        Outputs a plot of the background TS distributions
         as well as the observed TS
+
+        Parameters:
+        -----------
+        allow_neg: bool
+            Choose to allow negative TS values (if False, all negative values are set to TS=0). Default False
         '''
         if self.tsd is not None:
             fig, ax = plt.subplots()
@@ -405,6 +450,7 @@ def significance(self, p):
         ----------
         p: float
             P value
+
         Returns:
         --------
         sigma: float
@@ -415,16 +461,19 @@
         return sigma

     def save_results(self, alt_path=None):
-        r'''Once analysis has been performed, push
-        all relevant info to a new directory
+        r'''Once analysis has been performed, save
+        all relevant info to a new directory.
+        Saves to a file called [analysisid]_results.pickle
+
         Parameters:
         -----------
-        None
+        alt_path: string
+            optional specified directory to save results to
+
         Returns:
-        ________
-        None
-
-        TODO: Make a dictionary of keys that we want to save
+        -----------
+        save_items: (dict)
+            dictionary of saved analysis parameters
         '''
         if alt_path is None:
             print("Saving results to directory:\n\t{}".format(self.analysispath))
@@ -440,10 +489,17 @@
         return self.save_items

     def plot_ontime(self, with_contour=False, contour_files=None, label_events=False):
-        r'''Plots ontime events with either the full sky spatial
-        prior or a zoomed in version like traditional fast response
-        followups
-        label_events: adds a number label to events on skymap
+        r'''Plots ontime events on the full skymap and a
+        zoomed in version near the scan best-fit
+
+        Parameters:
+        -----------
+        with_contour: bool
+            plots the 90% containment contour of a skymap (default False)
+        contour_files: string
+            text file containing skymap contours to be plotted (default None)
+        label_events: bool
+            adds a number label to events on skymap (default False)
         '''
         try:
             self.plot_skymap_zoom(with_contour=with_contour, contour_files=contour_files)
@@ -453,7 +509,15 @@

     def plot_skymap_zoom(self, with_contour=False, contour_files=None):
         r'''Make a zoomed in portion of a skymap with
-        all neutrino events within a certain range
+        all ontime neutrino events within a certain range.
+        Outputs a plot (in png and pdf formats) to the analysis path
+
+        Parameters:
+        -----------
+        with_contour: bool
+            plots the 90% containment contour of a skymap (default False)
+        contour_files: string
+            text file containing skymap contours to be plotted (default None)
         '''
         events = self.llh.exp
         events = events[(events['time'] < self.stop) & (events['time'] > self.start)]
@@ -538,6 +602,16 @@
     def plot_skymap(self, with_contour=False, contour_files=None, label_events=False):
         r''' Make skymap with event localization and all
         neutrino events on the sky within the given time window
+        Outputs a plot in png format to the analysis path
+
+        Parameters:
+        -----------
+        with_contour: bool
+            plots the 90% containment contour of a skymap (default False)
+        contour_files: string
+            text file containing skymap contours to be plotted (default None)
+        label_events: bool
+            adds a number label to events on skymap (default False)
         '''
         events = self.llh.exp
@@ -649,34 +723,17 @@ def plot_skymap(self, with_contour=False, contour_files=None, label_events=False

     def generate_report(self):
         r'''Generates report using class attributes
-        and the ReportGenerator Class'''
+        and the ReportGenerator Class.
+        Makes full report and outputs as pdf
+        '''
         report = FastResponseReport(self)
         report.generate_report()
         report.make_pdf()
-        # report_kwargs = vars(self).copy()
-        # print("\nGenerating PDF Report")
-        # for key in ['name', 'trigger', 'start', 'stop', 'ts', 'ns', 'source_type', 'analysisid']:
-        #     report_kwargs.pop(key)
-        # report = ReportGenerator(self.name, self.trigger, self.start, self.stop,
-        #                          self.ts, self.ns, self.source_type, self.analysisid, **report_kwargs)
-        # report.generate_report()
-        # report.make_pdf()
-        # print("Report saved to output directory")
-        # username = subprocess.check_output(['whoami']).decode("utf-8").strip('\n')
-        # if os.path.isdir('/home/{}/public_html/FastResponse/'.format(username)):
-        #     print("Copying report to {}'s public html in FastResponse Directory".format(username))
-        #     shutil.copy2(self.analysispath + '/' + self.analysisid + "_report.pdf",
-        #                  '/home/{}/public_html/FastResponse/{}_report.pdf'.format(username, self.analysisid))
-        # else:
-        #     print("Creating FastResponse Directory in {}'s public html and copying report")
-        #     os.mkdir('/home/{}/public_html/FastResponse/'.format(username))
-        #     shutil.copy2(self.analysispath + '/' + self.analysisid + "_report.pdf",
-        #                  '/home/{}/public_html/FastResponse/{}_report.pdf'.format(username, self.analysisid))

 class PriorFollowup(FastResponseAnalysis):
     '''
-    Class for skymap based analyses
+    Class for skymap-based analyses
     '''
     _pixel_scan_nsigma = 4.0
     _containment = 0.99
@@ -717,6 +774,19 @@ def __str__(self):
         return int_str

     def format_skymap(self, skymap):
+        r'''Method to upgrade or downgrade the nside of a skymap to
+        the nside used in the analysis
+
+        Parameters:
+        -----------
+        skymap: array
+            Healpix skymap from a file
+
+        Returns:
+        ----------
+        skymap: array
+            Healpix skymap, with correct nside for use in FRA
+        '''
         if hp.pixelfunc.get_nside(skymap) != self._nside:
             skymap = hp.pixelfunc.ud_grade(skymap, self._nside, power=-2)
             skymap = skymap/skymap.sum()
@@ -728,12 +798,15 @@ def initialize_injector(self, e_range=(0., np.inf)):

         Parameters:
         -----------
-        self
-        gamma: float
-            spectral index to inject
+        e_range: tuple
+            optional energy range allowed for injector. default (0., np.inf)
+
         Returns:
         --------
-        inj: Skylab injector object'''
+        inj: Skylab injector object
+            Spatial prior injector using skylab
+        '''
+        print("Initializing Prior Injector")
         spatial_prior = SpatialPrior(self.skymap, containment = self._containment, allow_neg=self._allow_neg)
         self.spatial_prior = spatial_prior
@@ -752,11 +825,12 @@ def run_background_trials(self, ntrials=1000):
         r''' helper method to run background trials for the spatial
         prior analyses
+        Note: this method is not used in running the analysis in realtime

-        Returns:
-        --------
-        tsd: array-like
-            TS distribution for the background only trials
+        Parameters:
+        -----------
+        ntrials: int
+            number of trials to run (default 1000)
         '''
         tsd = []
         spatial_prior = SpatialPrior(self.skymap, containment = self._containment, allow_neg=self._allow_neg)
@@ -788,7 +862,10 @@ def find_coincident_events(self):
         r'''Find "coincident events" for a skymap
-        based analysis'''
+        based analysis.
+        These are ontime events that are also in the
+        90% contour of the skymap
+        '''
         t_mask=(self.llh.exp['time']<=self.stop)&(self.llh.exp['time']>=self.start)
         events = self.llh.exp[t_mask]
         exp_theta = 0.5*np.pi - events['dec']
@@ -817,11 +894,20 @@ def unblind_TS(self, custom_events=None):

         Parameters:
         -----------
-        self
-        --------
-        ts, ns: Test statistic and best fit ns
+        custom_events: array
+            specific events for use in scan (UNUSED)
+
+        Returns:
+        -----------
+        ts: float
+            unblinded test statistic
+        ns: float
+            unblinded fitted number of signal events at the best-fit location
+        gamma: float
+            unblinded fitted spectral index at the best-fit location,
+            returned only if the spectral index is floated
+
         '''
-        # TODO: Fix the case of getting best-fit gamma
         t0 = time.time()
         spatial_prior = SpatialPrior(
             self.skymap, containment = self._containment,
@@ -888,21 +974,24 @@
         return ts, ns

     def upper_limit(self):
+        ''' UPPER LIMIT WITH SPATIAL PRIOR NOT YET IMPLEMENTED
+        '''
         print("Upper limit with spatial prior not yet implemented")
         pass

     def ipixs_in_percentage(self, percentage):
         """Finding ipix indices confined in a given percentage.

-        Input parameters
+        Parameters
         ----------------
         skymap: ndarray
             array of probabilities for a skymap
         percentage : float
             fractional percentage from 0 to 1
-        Return
+
+        Returns
         -------
-        ipix : numpy array
+        ipix : numpy int array
             indices of pixels within percentage containment
         """
         # TODO: Find a more efficient way to do this
@@ -950,7 +1039,9 @@ def dec_skymap_range(self):

     def make_dNdE(self):
         r'''Make an E^-2 or E^-2.5 dNdE with the central 90%
-        for the most relevant declination'''
+        for the minimum and maximum declinations on the skymap
+        '''
+
         min_dec, max_dec = self.dec_skymap_range()
         dec_mask_1 = self.llh.mc['dec'] > min_dec - (5. * np.pi / 180.)
         dec_mask_2 = self.llh.mc['dec'] < min_dec + (5. * np.pi / 180.)
@@ -999,6 +1090,8 @@ def make_dNdE(self):
         self.save_items['energy_range'] = self.energy_range

     def ns_scan(self):
+        ''' NS SCAN WITH SPATIAL PRIOR NOT AN OPTION
+        '''
         print("ns scan not an option for skymap based analyses")

     def write_circular(self):
@@ -1040,10 +1133,13 @@ def __str__(self):
         return int_str

     def run_background_trials(self, ntrials=1000):
-        # if self._verbose:
-        #     print("Need to reinitialize LLH for background trials")
-        # self.llh = self.initialize_llh(
-        #     skipped=self.skipped, scramble=True)
+        r''' Method to run background trials for point source analysis
+
+        Parameters:
+        -----------
+        ntrials: int
+            number of background trials to run (default 1000)
+        '''
         trials = self.llh.do_trials(
             int(ntrials),
             src_ra=self.ra,
@@ -1052,6 +1148,19 @@
         self.save_items['tsd'] = self.tsd

     def initialize_injector(self, e_range=(0., np.inf)):
+        r'''Method to make relevant injector in Skylab.
+        Used in calculating upper limit
+
+        Parameters:
+        -----------
+        e_range: tuple
+            optional energy range allowed for injector. default (0., np.inf)
+
+        Returns:
+        --------
+        inj: Skylab injector object
+            Point source injector using skylab
+        '''
         inj = PointSourceInjector(
             gamma = self.index,
             E0 = 1000.,
@@ -1065,14 +1174,14 @@
         self.inj = inj

     def unblind_TS(self):
-        r''' Unblind TS, either sky scan for spatial prior,
-        or just at one location for a point source
+        r''' Unblind TS at one location for a point source

-        Parameters:
-        -----------
-        self
+        Returns
         --------
-        ts, ns: Test statistic and best fit ns
+        ts: float
+            unblinded test statistic
+        ns: float
+            best fit ns
         '''
         # Fix the case of getting best-fit gamma
         # TODO: What if gamma is floated
@@ -1090,6 +1199,10 @@
         return ts, ns

     def find_coincident_events(self):
+        r'''Find "coincident events" for the analysis.
+        These are ontime events with a spatial x energy weight > 10
+
+        '''
         spatial_weights = self.llh.llh_model.signal(
             self.ra, self.dec, self.llh._events,
             src_extension=self.extension)[0] / self.llh._events['B']
@@ -1150,9 +1263,18 @@ def ns_scan(self, params = {'spectrum': 'dN/dE = 1.00e+00 * (E / 1.00e+03 GeV)^-
     def upper_limit(self, n_per_sig=100, p0=None):
         r'''After calculating TS, find upper limit
         Assuming an E^-2 spectrum
+
+        Parameters:
+        -----------
+        n_per_sig: int
+            number of trials to run per injected signal strength
+        p0: None, scalar, or n-length sequence
+            Initial guess for the parameters passed to curve_fit (optional, default None)
+
         Returns:
         --------
-        Value of E^2 dN / dE in units of TeV / cm^2
+        upperlimit: float
+            Value of E^2 dN / dE in units of TeV / cm^2
         '''
         if self.inj is None:
             self.initialize_injector()
@@ -1218,7 +1340,9 @@ def upper_limit(self, n_per_sig=100, p0=None):

     def make_dNdE(self):
         r'''Make an E^-2 or E^-2.5 dNdE with the central 90%
-        for the most relevant declination'''
+        for the most relevant declination band
+        (+/- 5 deg around source dec)
+        '''
         dec_mask_1 = self.llh.mc['dec'] > self.dec - (5. * np.pi / 180.)
         dec_mask_2 = self.llh.mc['dec'] < self.dec + (5. * np.pi / 180.)
         dec_mask_3, dec_mask_4 = None, None
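The calc_pvalue docstring above describes comparing the unblinded TS against background-only trials; the underlying comparison is a simple survival fraction. A self-contained sketch with toy numbers (not the class's actual code path):

import numpy as np

def pvalue_from_trials(ts_obs, tsd):
    # fraction of background trials with TS at least as large as observed
    tsd = np.asarray(tsd)
    return float(np.count_nonzero(tsd >= ts_obs)) / tsd.size

# Toy background: roughly half of trials at TS=0, half chi2-distributed
rng = np.random.default_rng(1)
tsd = rng.chisquare(df=1, size=1000) * (rng.random(1000) < 0.5)
print(pvalue_from_trials(2.3, tsd))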
diff --git a/fast_response/GWFollowup.py b/fast_response/GWFollowup.py
index de5fd6d..4fc5c8d 100644
--- a/fast_response/GWFollowup.py
+++ b/fast_response/GWFollowup.py
@@ -336,8 +336,6 @@ def write_circular(self):
             gcn = gcn_template.read()

             low_sens, high_sens = self.sens_range
-            # for key, val in [('', f'{low_sens:1.3f}'),
-            #                  ()]
             gcn = gcn.replace('', '{:1.3f}'.format(low_sens))
             gcn = gcn.replace('', '{:1.3f}'.format(high_sens))
             gcn = gcn.replace('' , gw_name)
@@ -350,10 +348,9 @@ def write_circular(self):
             gcn_file.close()

         else:
-            significance = '{:1.2f}'.format(self.significance(pvalue))
+            #significance = '{:1.2f}'.format(self.significance(pvalue))

-            #info = ' \n'
-            info = '\t\t \t\t \t\t \t\t\t\t \t\t\t\t\t \n'
+            info = '\t \t\t \t\t \t\t\t \t\t\t \n'
             table = ''
             n_coincident_events=0
             for event in events:
@@ -387,6 +384,9 @@ def write_circular(self):
             num = str(n_coincident_events)
             #num = events['pvalue'][events['pvalue']<=0.1].size
             gcn_file = open(self.analysispath+'/gcn_%s.txt' % gw_name,'w')
+            best_ra = '{:1.2f}'.format(np.rad2deg(self.skymap_fit_ra))
+            best_dec = '{:1.2f}'.format(np.rad2deg(self.skymap_fit_dec))
+
             with open(template_path,'r') as gcn_template:

                 for line in gcn_template.readlines():
@@ -395,14 +395,17 @@ def write_circular(self):
                     line = line.replace('', noticeID)
                     line = line.replace('', start_iso)
                     line = line.replace('', stop_iso)
+                    line = line.replace('', best_ra)
+                    line = line.replace('', best_dec)
+
                     if pvalue<0.0013499:
                         pval_str = '<0.00135'
                         line = line.replace('', pval_str)
-                        line = line.replace('', '>3')
+                        #line = line.replace('', '>3')
                     else:
                         pval_str = '{:1.3f}'.format(pvalue)
                         line = line.replace('', pval_str)
-                        line = line.replace('', significance)
+                        #line = line.replace('', significance)

                     if '' in line:
                         line = table
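The placeholder tokens inside the replace('') calls above were stripped out of this view of the patch, but the pattern is plain string substitution over a GCN text template. A sketch with hypothetical token names (the real tokens live in gw_gcn_template_high.txt and are not recoverable here):

# '<gw_name>', '<tstart>', etc. are hypothetical stand-ins for the real tokens
replacements = {
    '<gw_name>': 'S230518h',
    '<tstart>': '2023-05-18 12:30:00',
    '<tstop>': '2023-05-18 12:47:00',
    '<p_value>': '{:1.3f}'.format(0.182),
}

with open('gw_gcn_template_high.txt') as template:
    text = template.read()
for token, value in replacements.items():
    text = text.replace(token, value)
with open('gcn_S230518h.txt', 'w') as out:
    out.write(text)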
diff --git a/fast_response/MonitoringAndMocks/Data_Display.py b/fast_response/MonitoringAndMocks/Data_Display.py
index 45b3b5b..7f22d44 100755
--- a/fast_response/MonitoringAndMocks/Data_Display.py
+++ b/fast_response/MonitoringAndMocks/Data_Display.py
@@ -8,52 +8,52 @@ matplotlib.rcParams['ytick.labelsize'] = 16
 import matplotlib.pyplot as plt
 import numpy as np
-import pickle, glob, os, datetime, pwd
+import glob, os, datetime, pwd, pickle
 from datetime import date
 from statistics import median
 #from statistics import mode
 from matplotlib.dates import DateFormatter
 from astropy.time import Time
 import pandas as pd
-#from IPython.display import HTML
-#from IPython.display import display
 import subprocess
 import warnings
 warnings.filterwarnings('ignore', module='astropy._erfa')

 def dial_up(who="jessie"):
     cell_tower = "/home/jthwaites/private/"
-    #halp = "https://icecube.wisc.edu/~jthwaites/FastResponse/error_call.xml"
-    subprocess.call([cell_tower+"make_call.py", f"--{who}=True", '--troubleshoot=True'])#, "--call_file", halp])
-    #print([cell_tower+"make_call.py", f"--{who}=True", '--troubleshoot=True'])
+    subprocess.call([cell_tower+"make_call.py", f"--{who}=True", '--troubleshoot=True'])

-#path = "/data/user/jthwaites/FastResponseAnalysis/output/"
 path=os.environ.get('FAST_RESPONSE_OUTPUT')
 out_put = '/data/user/jthwaites/o4-mocks/'

 #Creating readable (and callable) files from ALL pickle files previously created in gw_gcn_listener
 if pwd.getpwuid(os.getuid())[0] == 'realtime':
-    Pickle_to_text = sorted(glob.glob(path+'PickledMocks/*MS*.pickle')+
-        glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*MS*.pickle'))
+    Pickle_to_text = sorted(glob.glob(path+'PickledMocks/*MS*.pickle'))#+
+        #glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*MS*.pickle'))
 else:
     Pickle_to_text = sorted(glob.glob(path+'PickledMocks/*MS*.pickle'))

 all_dictionary = {'Trigger_Time': [], 'GCN_Alert': [], 'End_Time': [],
-                 'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
-                 'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}
+                  'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
+                  'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}

+print('Number of mocks to load: {}'.format(len(Pickle_to_text)))
+i=0
 for file in Pickle_to_text:
-  if os.path.exists(file):
-    with open(file, 'rb') as pickle_now:
-      Entries = pickle.load(pickle_now)
-    if int(Entries["Trigger_Time"]) == 59957:
-      continue #This is January 13th 2023, dates were messed up.
-    for Key in Entries.keys():
-      if Key=="Ligo_Latency":
-        all_dictionary['LVK_Latency'].append(Entries[Key])
-      else:
-        all_dictionary[Key].append(Entries[Key])
-    all_dictionary['Name'].append(file)
+    #if i%100==0: print(i)
+    if os.path.exists(file):
+        with open(file, 'rb') as pickle_now:
+            Entries = pickle.load(pickle_now)
+        if int(Entries["Trigger_Time"]) == 59957:
+            continue #This is January 13th 2023, dates were messed up.
+        for Key in Entries.keys():
+            if Key=="Ligo_Latency":
+                all_dictionary['LVK_Latency'].append(Entries[Key])
+            else:
+                all_dictionary[Key].append(Entries[Key])
+        all_dictionary['Name'].append(file)
+    i+=1

 for file in all_dictionary["Name"]:
     date_format = "%Y-%m-%d"
     c_timestamp = os.path.getmtime(file)
     c_datestamp = datetime.datetime.fromtimestamp(c_timestamp)
@@ -68,57 +68,56 @@ def dial_up(who="jessie"):
     dial_up()
     x = (Time(datetime.datetime.utcnow()).mjd - max(Time(all_dictionary["Time_Stamp"]).mjd))*24
     print("It has been " +str(x) +" hours since last update to gw mocks.")
-#   print("Uh oh... spaghetti-o's")
+    #print("Uh oh... spaghetti-o's")

 #Sorting pickle files by date created and by most correct version
 if pwd.getpwuid(os.getuid())[0] == 'realtime':
-    Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle')+
-        glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*.pickle'))
+    Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle'))#+
+        #glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*.pickle'))
 else:
     Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle'))

-#Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle'))
 date_format = "%Y-%m-%d"
-
 Quality_Pickles = []

 for file in Pickle_Files:
-  c_timestamp = os.path.getmtime(file)
-  c_datestamp = datetime.datetime.fromtimestamp(c_timestamp)
-  if (c_datestamp >= datetime.datetime.strptime('2022-12-05', date_format)):
-    Quality_Pickles.append(file)
+    c_timestamp = os.path.getmtime(file)
+    c_datestamp = datetime.datetime.fromtimestamp(c_timestamp)
+    if (c_datestamp >= datetime.datetime.strptime('2022-12-05', date_format)):
+        Quality_Pickles.append(file)

 #Collecting the first maps created for each event for analysis
 First_Batch={'Trigger_Time': [], 'GCN_Alert': [], 'End_Time': [],
-            'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
-            'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}
+             'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
+             'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}

 if pwd.getpwuid(os.getuid())[0] == 'realtime':
-    First_Runs = sorted(glob.glob(path+'PickledMocks/*MS*-1-*.pickle')+
-        glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*MS*-1-*.pickle'))
+    First_Runs = sorted(glob.glob(path+'PickledMocks/*MS*-1-*.pickle'))#+
+        #glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*MS*-1-*.pickle'))
 else:
     First_Runs = sorted(glob.glob(path+'PickledMocks/*MS*-1-*.pickle'))

-#First_Runs= sorted(glob.glob((path+'PickledMocks/*MS*-1-*.pickle')))

 for file in First_Runs:
-  if os.path.exists(file):
-    with open(file, 'rb') as pickle_now:
-      Entries = pickle.load(pickle_now)
-    if int(Entries["Trigger_Time"]) == 59957:
-      continue
-    for Key in Entries.keys():
-      if Key=="Ligo_Latency":
-        First_Batch['LVK_Latency'].append(Entries[Key])
-      else:
-        First_Batch[Key].append(Entries[Key])
-    First_Batch['Name'].append(file)
+    if os.path.exists(file):
+        with open(file, 'rb') as pickle_now:
+            Entries = pickle.load(pickle_now)
+        if int(Entries["Trigger_Time"]) == 59957:
+            continue
+        for Key in Entries.keys():
+            if Key=="Ligo_Latency":
+                First_Batch['LVK_Latency'].append(Entries[Key])
+            else:
+                First_Batch[Key].append(Entries[Key])
+        First_Batch['Name'].append(file)
+
 for file in First_Batch["Name"]:
-  date_format = "%Y-%m-%d"
-  c_timestamp = os.path.getmtime(file)
-  c_datestamp = datetime.datetime.fromtimestamp(c_timestamp)
-  First_Batch['Time_Stamp'].append(c_datestamp)
+    date_format = "%Y-%m-%d"
+    c_timestamp = os.path.getmtime(file)
+    c_datestamp = datetime.datetime.fromtimestamp(c_timestamp)
+    First_Batch['Time_Stamp'].append(c_datestamp)

 print('Finished loading 1st map latencies.')

-#Breakdown full name of files created so only necessary parts are saved (i.e., the code for the event and number of the map)
+#Breakdown full name of files created so only necessary parts are saved
+# (i.e., the code for the event and number of the map)
 pieces = [string.split('-') for string in Quality_Pickles]

 Puzzle_Box = {}
@@ -140,8 +139,8 @@ def dial_up(who="jessie"):
         names.append(key[-9:]+'-'+str(high_file)+'-'+Puzzle_Box[key][1][i].split('.')[0])

 Last_Batch = {'Trigger_Time': [], 'GCN_Alert': [], 'End_Time': [],
-             'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
-             'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}
+              'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
+              'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}

 #Make keys in dictionary callable
 for file in answer:
     if os.path.exists(file):
         with open(file, 'rb') as pickle_in:
             Brine = pickle.load(pickle_in)
         if int(Brine["Trigger_Time"]) == 59957: #This is January 13, 2023.
-            continue #There was an error causing multiple files to be falsely saved to this date.
+            #There was an error causing multiple files to be falsely saved to this date.
+            continue
         for Key in Brine.keys():
             if Key=="Ligo_Latency":
-              Last_Batch['LVK_Latency'].append(Brine[Key])
+                Last_Batch['LVK_Latency'].append(Brine[Key])
             else:
-              Last_Batch[Key].append(Brine[Key])
+                Last_Batch[Key].append(Brine[Key])
         Last_Batch['Name'].append(file)
+
 for file in Last_Batch["Name"]:
     date_format = "%Y-%m-%d"
     c_timestamp = os.path.getmtime(file)
@@ -167,34 +168,35 @@ for file in Last_Batch["Name"]:
 First_TS = []
 Last_TS = []

 def find_ts(Batch, TS_List):
-  split_names = [string.split('/') for string in Batch]
+    split_names = [string.split('/') for string in Batch]

-  split_bits = []
-  prime_split = []
-  final_bit = []
+    split_bits = []
+    prime_split = []
+    final_bit = []

-  for i in range(len(split_names)):
-    split_bits.append(split_names[i][-1])
+    for i in range(len(split_names)):
+        split_bits.append(split_names[i][-1])

-  middle_split = [string.split('.') for string in split_bits]
+    middle_split = [string.split('.') for string in split_bits]

-  for i in range(len(middle_split)):
-    prime_split.append(middle_split[i][-2])
+    for i in range(len(middle_split)):
+        prime_split.append(middle_split[i][-2])

-  chop = [string.split('_') for string in prime_split]
+    chop = [string.split('_') for string in prime_split]

-  for i in range(len(chop)):
-    final_bit.append(chop[i][-2]+'_'+chop[i][-1])
+    for i in range(len(chop)):
+        final_bit.append(chop[i][-2]+'_'+chop[i][-1])

-  for i in final_bit:
-    FTS=glob.glob(out_put+f'/202*{i}/TS_distribution.png')
-    if len(FTS)>0:
-      TS_List.append(True)
-    else:
-      TS_List.append(False)
-  TS_List = np.array(TS_List)
-  return TS_List
+    for i in final_bit:
+        FTS=glob.glob(out_put+f'/202*{i}/TS_distribution.png')
+        if len(FTS)>0:
+            TS_List.append(True)
+        else:
+            TS_List.append(False)
+
+    TS_List = np.array(TS_List)
+    return TS_List

 First_TS = find_ts(Batch = First_Batch["Name"], TS_List = First_TS)
 Last_TS = find_ts(Batch = Last_Batch["Name"], TS_List = Last_TS)
@@ -203,28 +205,29 @@ def find_ts(Batch, TS_List):
 outliers_ice = []
 outliers_tot = []

 for item in range(len(First_Batch["IceCube_Latency"])):
-  if First_Batch["IceCube_Latency"][item]>=1200:
-    outliers_ice.append(First_Batch["Name"][item])
+    if First_Batch["IceCube_Latency"][item]>=1200:
+        outliers_ice.append(First_Batch["Name"][item])

 for item in range(len(First_Batch["Total_Latency"])):
-  if First_Batch["Total_Latency"][item]>=3000:
-    outliers_tot.append(First_Batch["Name"][item])
+    if First_Batch["Total_Latency"][item]>=3000:
+        outliers_tot.append(First_Batch["Name"][item])

 icecube_outliers = []
 total_outliers = []

 def outlier_name(outlier_list, outlier_names):
-  nb = [string.split('/') for string in outlier_list]
+    nb = [string.split('/') for string in outlier_list]

-  out_bit=[]
+    out_bit=[]

-  for i in range(len(nb)):
-    out_bit.append(nb[i][-1])
+    for i in range(len(nb)):
+        out_bit.append(nb[i][-1])

-  last_bit = [string.split('_') for string in out_bit]
+    last_bit = [string.split('_') for string in out_bit]
+
+    for i in range(len(last_bit)):
+        outlier_names.append(last_bit[i][-2])

-  for i in range(len(last_bit)):
-    outlier_names.append(last_bit[i][-2])

 outlier_name(outlier_list = outliers_ice, outlier_names = icecube_outliers)
 outlier_name(outlier_list = outliers_tot, outlier_names = total_outliers)
@@ -727,16 +730,14 @@ def outlier_name(outlier_list, outlier_names):
     plt.tight_layout()
     fig.savefig("Linear_Delta_T_TIMESTAMP.png")

-###p value plots
-import matplotlib as mpl
-mpl.use('agg')
-
-def make_bg_pval_dist(fontsize=15, lower_y_bound=-3.5):
-    # function to make pval dist. lower_y_bound arg gives the exponent to set the lower y-axis
+###p value plot
+def make_bg_pval_dist(fontsize=15, lower_y_bound=-3.5, load_all = False):
+    # function to make pval hist. lower_y_bound arg gives the exponent to set the lower y-axis
     # limit, e.g. 10^-3
     all_maps_saved_pkl=sorted(glob.glob('/data/user/jthwaites/o4-mocks/*/*.pickle'))[::-1]
     saved_mock_pkl=[all_maps_saved_pkl[0]]
-    
+
     for mock in all_maps_saved_pkl:
         event_name=mock.split('-')[-3]
         event_name=event_name.split('/')[-1]
@@ -744,15 +745,22 @@ def make_bg_pval_dist(fontsize=15, lower_y_bound=-3.5, load_all = False):
             saved_mock_pkl.append(mock)

     all_mocks={}
-    print('Loading %i mocks (may take a while)'%(len(saved_mock_pkl)))
+    #if more than 2000 found - load most recent only (otherwise takes too long)
+    if len(saved_mock_pkl)>2000 and not load_all:
+        print('Loading most recent 2000 mocks (may take a while)')
+        saved_mock_pkl = saved_mock_pkl[0:2000]
+    else:
+        print('Loading %i mocks (may take a while)'%(len(saved_mock_pkl)))
+
+    i=0
     for mock in saved_mock_pkl:
         with open(mock,'rb') as f:
             result=pickle.load(f)
             #print('skipped {}'.format(mock))
             all_mocks[result['name']]=result['p']
+        i+=1
     print('Done loading mocks.')

-    mpl.rcParams.update({'font.size':fontsize})
+    matplotlib.rcParams.update({'font.size':fontsize})
     plt.figure(figsize = (10,6), dpi=300)
     #ax.tick_params(labelsize=fontsize)
@@ -766,8 +774,8 @@ def make_bg_pval_dist(fontsize=15, lower_y_bound=-3.5, load_all = False):
     #uniform_bins=np.logspace(lower_y_bound,0.,int(abs(lower_y_bound*7))+1) #evenly spaced bins in logspace
     #plt.step(uniform_bins[1:], np.diff(uniform_bins), label = 'Uniform p-value expectation', lw = 3.)
     plt.step(p_x_vals[1:], np.diff(p_x_vals), label = 'Uniform p-value distribution', lw = 3.)
-    plt.plot([0.1,0.1], [10**lower_y_bound, 1e0],linestyle='dotted', label=f'{lt_10per*100.:.2f} \% of p-values $<$ 0.1')
-    plt.plot([0.01, 0.01], [10**lower_y_bound, 1e0], linestyle='dashed',label=f'{lt_1per*100.:.2f} \% of p-values $<$ 0.01')
+    plt.plot([0.1,0.1], [10**lower_y_bound, 1e0],linestyle='dotted', label=f'{lt_10per*100.:.2f}% of p-values $<$ 0.1')
+    plt.plot([0.01, 0.01], [10**lower_y_bound, 1e0], linestyle='dashed',label=f'{lt_1per*100.:.2f}% of p-values $<$ 0.01')

     plt.xscale('log')
     plt.yscale('log')
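make_bg_pval_dist above labels its plot with the fraction of mock p-values below 0.1 and 0.01; for background-only maps these should sit near 10% and 1%. A standalone sketch of that check with synthetic p-values:

import numpy as np

pvals = np.random.default_rng(42).uniform(size=5000)  # stand-in for the mock results
lt_10per = np.count_nonzero(pvals < 0.1) / pvals.size
lt_1per = np.count_nonzero(pvals < 0.01) / pvals.size
print(f'{lt_10per*100.:.2f}% of p-values < 0.1, {lt_1per*100.:.2f}% < 0.01')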
diff --git a/fast_response/circular_templates/gw_gcn_template_high.txt b/fast_response/circular_templates/gw_gcn_template_high.txt
index 5aa6293..2f1dcd8 100644
--- a/fast_response/circular_templates/gw_gcn_template_high.txt
+++ b/fast_response/circular_templates/gw_gcn_template_high.txt
@@ -14,9 +14,9 @@ assumes a binary merger scenario and accounts for known astrophysical priors, su
 distance, in the significance estimate [3].

 track-like event(s) are found in spatial and temporal coincidence with the gravitational-wave
-candidate calculated from the map circulated . This
-represents an overall p-value of (sigma) from the generic transient search
-and an overall p-value of (sigma) for the Bayesian search. These p-values
+candidate calculated from the map circulated in the notice. This
+represents an overall p-value of from the generic transient search
+and an overall p-value of for the Bayesian search. These p-values
 measure the consistency of the observed track-like events with the known
 atmospheric backgrounds for this single map (not trials corrected for multiple GW events).
 The most probable multi-messenger source direction based on the neutrinos and GW skymap
 is RA , Dec degrees.
diff --git a/fast_response/make_ontime_plots.py b/fast_response/make_ontime_plots.py
index 6af9c41..680f07c 100644
--- a/fast_response/make_ontime_plots.py
+++ b/fast_response/make_ontime_plots.py
@@ -125,10 +125,18 @@ def make_rate_plots(time_window, run_table, query_events, dirname, season='neutr
     recstart= Time(rates['rec_start'],format='mjd',scale='utc')
     recstop = Time(rates['rec_stop' ],format='mjd',scale='utc')

-    online_str = 'OnlineL2Filter_16' if season == 'neutrino16' else 'OnlineL2Filter_17'
+    if season == 'neutrino16':
+        online_str = 'OnlineL2Filter_16'
+        muon_str='MuonFilter_13'
+    elif time_window[1].mjd > 60276.86875:
+        online_str='OnlineL2Filter_23'
+        muon_str='MuonFilter_23'
+    else:
+        online_str = 'OnlineL2Filter_17'
+        muon_str='MuonFilter_13'

     for rate, name, unit in [('IN_ICE_SIMPLE_MULTIPLICITY', 'In-Ice-Simple-Multiplicity', '(kHz)'),
-                             ('MuonFilter_13', 'Muon Filter', '(Hz)'),
+                             (muon_str, 'Muon Filter', '(Hz)'),
                              (online_str, 'Online L2 Filter', '(Hz)')]:
         fig, ax = plt.subplots(figsize = (12,4))
         time_series(ax, run_table, time_window,
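The branch added to make_rate_plots above selects filter names by season and by an MJD cutover (60276.86875, taken from this patch; presumably the start of the 2023 filter naming). The same decision isolated as a helper, for illustration only:

def filter_names(season, stop_mjd):
    # mirrors the branch in make_rate_plots; the cutover MJD is from the patch
    if season == 'neutrino16':
        return 'OnlineL2Filter_16', 'MuonFilter_13'
    if stop_mjd > 60276.86875:
        return 'OnlineL2Filter_23', 'MuonFilter_23'
    return 'OnlineL2Filter_17', 'MuonFilter_13'

print(filter_names('neutrino', 60300.0))  # ('OnlineL2Filter_23', 'MuonFilter_23')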
diff --git a/fast_response/plotting_utils.py b/fast_response/plotting_utils.py
index 2950483..e31ac21 100644
--- a/fast_response/plotting_utils.py
+++ b/fast_response/plotting_utils.py
@@ -6,6 +6,24 @@ import meander

 def plot_zoom(scan, ra, dec, title, reso=3, var="pVal", range=[0, 6],cmap=None):
+    '''
+    Make a zoomed-in skymap around a particular point (RA, decl) on the sky
+
+    Parameters:
+    -----------
+    scan: numpy array
+        Healpix map of values to be plotted (usually a skymap)
+    ra: float
+        Right Ascension value at the center of the plot (best-fit RA or source RA)
+    dec: float
+        Declination value at the center of the plot (best-fit dec or source dec)
+    title: str
+        Plot title to use
+    reso: float
+        Resolution (arcmins), default 3
+    cmap: matplotlib colormap or None
+        colormap to use. if not set default is Seaborn "Blues"
+    '''
     if cmap is None:
         pdf_palette = sns.color_palette("Blues", 500)
         cmap = mpl.colors.ListedColormap(pdf_palette)
@@ -24,6 +42,20 @@ def plot_zoom(scan, ra, dec, title, reso=3, var="pVal", range=[0, 6],cmap=None):
     plot_labels(dec, ra, reso)

 def plot_color_bar(labels=[0.,2.,4.,6.], col_label=r"IceCube Event Time", range=[0,6], cmap=None, offset=-35):
+    '''
+    Adds a color bar to an existing healpy map
+
+    Parameters:
+    -----------
+    labels: float list
+        list of points to be used (default [0., 2., 4., 6.])
+    col_label: str
+        label for colorbar (default IceCube Event Time)
+    cmap: matplotlib colormap or None
+        colormap to use. if not set default is "Blues"
+    offset: int
+        offset value for colorbar's label. default is -35
+    '''
     fig = plt.gcf()
     ax = fig.add_axes([0.95, 0.2, 0.03, 0.6])
     labels = labels
@@ -37,7 +69,18 @@ def plot_color_bar(labels=[0.,2.,4.,6.], col_label=r"IceCube Event Time", range=
     cb.update_ticks()

 def plot_labels(src_dec, src_ra, reso):
-    """Add labels to healpy zoom"""
+    """
+    Add labels to healpy zoom
+
+    Parameters:
+    -----------
+    src_dec: float
+        Declination value at the center of the plot (best-fit dec or source dec)
+    src_ra: float
+        Right Ascension value at the center of the plot (best-fit RA or source RA)
+    reso: float
+        Resolution (arcmins)
+    """
     fontsize = 20
     plt.text(-1*np.radians(1.75*reso),np.radians(0),
              r"%.2f$^{\circ}$"%(np.degrees(src_dec)),
             horizontalalignment='right',
@@ -65,7 +108,38 @@ def plot_labels(src_dec, src_ra, reso):
 def plot_events(dec, ra, sigmas, src_ra, src_dec, reso, sigma_scale=5., col = 'k', constant_sigma=False,
                 same_marker=False, energy_size=False, with_mark=True, with_dash=False, label=''):
-    """Adds events to a healpy zoom, get events from llh."""
+    """
+    Adds events to a healpy zoom plot. Events are expected to be from self.llh.exp
+
+    Parameters:
+    -----------
+    dec: float array
+        Array of declination values for each event
+    ra: float array
+        Array of Right Ascension values for each event
+    sigmas: float array
+        Angular error (circularized) to be plotted. Usually a 90% CL angular error
+    src_ra: float
+        Right Ascension value at the center of the plot (best-fit RA or source RA)
+    src_dec: float
+        Declination value at the center of the plot (best-fit dec or source dec)
+    reso: float
+        Resolution (arcmins)
+    sigma_scale: float or None
+        Value to rescale the sigma parameter (default 5.0).
+        If None: no angular uncertainty is plotted (sometimes used for very long time windows)
+    col: str or str array
+        color to use for each event plotted (default k=black)
+    constant_sigma: bool
+        Ignores sigma parameter and plots all markers with a size of 20.
+    with_mark: bool
+        Uses an x marker instead of o
+    with_dash: bool
+        Plot the angular error as a dashed contour.
+        Usually used to indicate a removed event (e.g. alert event that triggered the analysis)
+    same_marker, energy_size: bool
+        Currently unused options.
+    """
     cos_ev = np.cos(dec)
     tmp = np.cos(src_ra - ra) * np.cos(src_dec) * cos_ev + np.sin(src_dec) * np.sin(dec)
     dist = np.arccos(tmp)
@@ -87,6 +161,10 @@ def plot_events(dec, ra, sigmas, src_ra, src_dec, reso, sigma_scale=5., col = 'k
                 edgecolor=col, facecolor=col, s=60, alpha=1.0)

 def load_plotting_settings():
+    '''
+    Load settings to be used as default plot settings.
+    Includes Times New Roman font and size 12 font
+    '''
     mpl.use('agg')
     mpl.rcParams['text.usetex'] = True
     try:
@@ -117,6 +195,7 @@ def contour(ra, dec, sigma, nside):
         Array of sigma to make contours around events
     nside:
         nside of healpy map
+
     Returns:
     --------
     Theta: array
@@ -159,6 +238,7 @@ def plot_contours(proportions, samples):
     samples: array
         array of values read in from healpix map
         E.g samples = hp.read_map(file)
+
     Returns:
     --------
     theta_list: list
@@ -189,6 +269,27 @@ def plot_contours(proportions, samples):
     return theta_list, phi_list

 def make_public_zoom_skymap(skymap, events, ra, dec, with_contour=True, name='test'):
+    '''
+    Make a zoomed skymap for public webpage (currently unused, under development)
+
+    Parameters:
+    -----------
+    skymap: array
+        Healpy probability skymap (plotted as a colorscale)
+    events: array
+        Array of events to plot (expected from self.llh.exp)
+    ra: float
+        Right Ascension value at the center of the plot (best-fit RA or source RA)
+    dec: float
+        Declination value at the center of the plot (best-fit dec or source dec)
+    with_contour: bool
+        Plot skymap 90% contour (default True)
+    name: str
+        Event name, to save in filename
+
+    See also:
+    plot_zoom: full routine to plot zoomed skymap
+    '''
     pdf_palette = sns.color_palette("Blues", 500)
     cmap = mpl.colors.ListedColormap(pdf_palette)
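The plot_zoom and plot_color_bar signatures documented above can be driven with any Healpix array. A sketch with a random map (coordinates in radians, matching plot_labels' use of np.degrees; assumes the usual healpy/matplotlib/seaborn stack is installed):

import healpy as hp
import numpy as np
from fast_response import plotting_utils

nside = 256
scan = np.random.default_rng(0).random(hp.nside2npix(nside))  # stand-in skymap

plotting_utils.plot_zoom(scan, ra=np.radians(77.4), dec=np.radians(5.7),
                         title='example zoom', reso=3., range=[0, 6])
plotting_utils.plot_color_bar(labels=[0., 3., 6.], col_label=r'$-\log_{10}(p)$',
                              range=[0, 6])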
diff --git a/fast_response/precomputed_background/glob_precomputed_trials.py b/fast_response/precomputed_background/glob_precomputed_trials.py
index 62b13f6..e1ce7aa 100644
--- a/fast_response/precomputed_background/glob_precomputed_trials.py
+++ b/fast_response/precomputed_background/glob_precomputed_trials.py
@@ -25,7 +25,7 @@ def glob_allsky_scans(delta_t, rate, dir, low_stats=False):
     #n_per_job = {1000.: 1000, 172800.: 50, 2678400.: 30}
     #jobs_per_window = {1000.: 20, 172800.: 100, 2678400.: 100}

-    files = glob(dir+'/gw_{:.1f}_mHz_seed_*_delta_t_{:.1e}.npz'.format(rate, delta_t))
+    files = glob(dir+'/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_*.npz'.format(rate, delta_t))
     nside = 512
     npix = hp.nside2npix(nside)
     maps = sparse.csr_matrix((0, npix), dtype=float)
@@ -54,8 +54,8 @@ def glob_allsky_scans(delta_t, rate, dir, low_stats=False):

     return maps

-for rate in [6.0, 6.2, 6.4, 6.6, 6.8, 7.0, 7.2]:
-    for low_stats in [False]:#[True, False]:
+for rate in [6.0]:#, 6.2, 6.4, 6.6, 6.8, 7.0, 7.2]:
+    for low_stats in [True, False]:
         print("Rate: {} mHz, low stats: {}".format(rate, low_stats))
         maps = glob_allsky_scans(args.deltaT, rate, args.dir, low_stats=low_stats)
         del maps

diff --git a/fast_response/precomputed_background/precompute_ts.py b/fast_response/precomputed_background/precompute_ts.py
index 57e539a..e4b821d 100644
--- a/fast_response/precomputed_background/precompute_ts.py
+++ b/fast_response/precomputed_background/precompute_ts.py
@@ -21,7 +21,7 @@ parser.add_argument('--ntrials', type=int, default = 10000,
                     help='Trials')
 parser.add_argument("--bkg", default=6.4, type=float,
-                    help="Expected background rate in mHz (defualt 6.4)")
+                    help="Expected background rate in mHz (default 6.4)")
 parser.add_argument('--seed', default=1, type=int,
                     help='Unique seed for running on the cluster')
 parser.add_argument('--outdir',type=str, default=None,
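Reading the per-seed trial files back in (as glob_allsky_scans does above) is the reverse of the save path shown in precompute_ts_gw.py below. A sketch, assuming the files sit in a local trials/ directory:

import healpy as hp
from glob import glob
from scipy import sparse

rate, delta_t = 6.0, 1000.0
files = glob('trials/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_*.npz'.format(rate, delta_t))

maps = sparse.csr_matrix((0, hp.nside2npix(512)), dtype=float)  # nside 512, as above
for f in files:
    maps = sparse.vstack((maps, sparse.load_npz(f)))
print('Loaded {} background trials'.format(maps.shape[0]))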
diff --git a/fast_response/precomputed_background/precompute_ts_gw.py b/fast_response/precomputed_background/precompute_ts_gw.py
index fab4bbc..e9bd3ba 100644
--- a/fast_response/precomputed_background/precompute_ts_gw.py
+++ b/fast_response/precomputed_background/precompute_ts_gw.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 r"""
 Run trials for background only, all-sky scans.
 No spatial prior is included; instead we record the
@@ -10,7 +12,7 @@
 import numpy as np
 import healpy as hp
-import argparse, os
+import argparse, os, time
 from astropy.time import Time

 from fast_response import GWFollowup
@@ -37,30 +39,21 @@ args = p.parse_args()

 def config_llh(self, scramble = True):
-    """ Use full timescramble method for BG trials
+    ''' Use full timescramble method for BG trials
     This method is adapted from initialize_llh in FastResponseAnalysis.py
-    """
-
-    self.get_data()
+    '''

-    print("Initializing Point Source LLH in Skylab")
+    print("Initializing Point Source LLH w/Timescramble in Skylab")

     assert self._float_index==True,\
         'Index should be fitted - check initialization'

-    if self._fix_index:
-        llh_model = EnergyLLH(
-            twodim_bins=[self.energy_bins, self.sinDec_bins],
-            allow_empty=True,
-            spectrum=PowerLaw(A=1, gamma=self.index, E0=1000.),
-            ncpu=self._ncpu)
-    elif self._float_index:
-        llh_model = EnergyLLH(
-            twodim_bins=[self.energy_bins, self.sinDec_bins],
-            allow_empty=True,
-            bounds=self._index_range,
-            seed = self.index,
-            ncpu=self._ncpu)
+    llh_model = EnergyLLH(
+        twodim_bins=[self.energy_bins, self.sinDec_bins],
+        allow_empty=True,
+        bounds=self._index_range,
+        seed = self.index,
+        ncpu=self._ncpu)

     box = TemporalModel(
         grl=self.grl,
@@ -68,9 +61,6 @@ def config_llh(self, scramble = True):
         days=self._nb_days,
         signal=BoxProfile(self.start, self.stop))

-    # should not be an extension here
-    src_extension = None
-
     llh = PointSourceLLH(
         self.exp,                   # array with data events
         self.mc,                    # array with Monte Carlo events
@@ -81,10 +71,10 @@ def config_llh(self, scramble = True):
         llh_model=llh_model,        # likelihood model
         temporal_model=box,         # use box for temporal model
         nsource_bounds=(0., 1e3),   # bounds on fitted ns
-        src_extension = src_extension, # Model symmetric extended source
+        src_extension = None,       # Model symmetric extended source
         nsource=1.,                 # seed for nsignal fit
         )
-
+    llh._warn_nsignal_max=False
     return llh

 if args.outdir == None:
@@ -100,10 +90,12 @@ if args.outdir == None:
 gw_time = Time(60000., format='mjd') #2023-02-25

 delta_t = float(args.deltaT)
+delta_t_days = delta_t/86400.

 if args.deltaT==1000:
     start_mjd = gw_time - (delta_t / 86400. / 2.)
     stop_mjd = gw_time + (delta_t / 86400. / 2.)
+
 else: #2 week followup
     start_mjd = gw_time - 0.1
     stop_mjd = gw_time + 14.
@@ -113,11 +105,13 @@ else: #2 week followup

 #skymap needed for initialization - not used here
 f = GWFollowup('precomputed_bg_test', '/data/user/jthwaites/o3-gw-skymaps/S191216ap.fits.gz',
-               start_iso, stop_iso)
+               start_iso, stop_iso, save=False)
 f._allow_neg = False

 f.llh = config_llh(f)
+
 f.llh.nbackground=args.bkg
+print('nside = {}'.format(f.nside))

 ntrials = args.ntrials
 stop = ntrials * (args.seed+1)
@@ -128,9 +122,11 @@ stop = ntrials * (args.seed+1)
 shape = (args.ntrials, npix)
 maps = sparse.lil_matrix(shape, dtype=float)
 for jj in range(ntrials):
-    val = f.llh.scan(0.0, 0.0, scramble=True, seed = seed_counter,
+    print('Starting scan {}'.format(jj))
+    t1 = time.time()
+    val = f.llh.scan(0.0, 0.0, seed = seed_counter, scramble=True,
         #spatial_prior = f.spatial_prior,
-        time_mask = [delta_t / 2., (start_mjd + stop_mjd) / 2.],
+        time_mask = [delta_t_days / 2., (start_mjd.mjd + stop_mjd.mjd) / 2.],
         pixel_scan = [f.nside, f._pixel_scan_nsigma],
         inject = None)
     if val['TS'] is not None:
@@ -138,11 +134,14 @@ for jj in range(ntrials):
         results = np.empty((val['TS'].size,), dtype=dtype)
         pixels = hp.ang2pix(f.nside, np.pi/2. - val['dec'], val['ra'])
         maps[jj, pixels] = val['TS']
+        t2 = time.time()
+        print("finished scan, took {} s".format(t2-t1))
     seed_counter+=1

 print("DONE")
 hp_sparse = maps.tocsr()

 outfilename = outdir+'gw_{:.1f}_mHz_delta_t_{:.1e}_seed_{}.npz'.format(args.bkg, args.deltaT, args.seed)
+
 sparse.save_npz(outfilename, hp_sparse)
 print("Saved to {}".format(outfilename))
\ No newline at end of file
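One of the quieter fixes above is the time_mask handed to llh.scan: the original passed the window half-width in seconds and the center as Time objects, while the corrected version passes the half-width in days and the center as a float MJD, which is what the fix suggests skylab expects. Worked numbers for the 1000 s case:

delta_t = 1000.0                    # seconds
delta_t_days = delta_t / 86400.0    # ~0.01157 days
center_mjd = 60000.0                # (start_mjd.mjd + stop_mjd.mjd) / 2
time_mask = [delta_t_days / 2.0, center_mjd]
print(time_mask)                    # [0.005787..., 60000.0]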
diff --git a/fast_response/precomputed_background/submit_precomputed_trials.py b/fast_response/precomputed_background/submit_precomputed_trials.py
index a3a3bd8..002ab54 100644
--- a/fast_response/precomputed_background/submit_precomputed_trials.py
+++ b/fast_response/precomputed_background/submit_precomputed_trials.py
@@ -29,6 +29,9 @@ parser.add_argument(
 parser.add_argument(
     '--outdir', default= default_outdir, type=str,
     help = 'Output directory. default is FAST_RESPONSE_OUTPUT (if set) or ./ if not')
+parser.add_argument(
+    '--no_overwrite', type=int, default=1,
+    help='int to make sure previous trials are not overwritten (1=True, default)')
 args = parser.parse_args()

 print('Output trials to directory: {}'.format(args.outdir))
@@ -70,8 +73,8 @@
     getenv=True,
     universe='vanilla',
     verbose=2,
-    request_cpus=8,
-    request_memory=8000,
+    request_cpus=8,#10,
+    request_memory=8000,#10000,
     extra_lines=[
         'should_transfer_files = YES',
         'when_to_transfer_output = ON_EXIT']
     )
@@ -79,6 +82,11 @@ for bg_rate in [6.0, 6.2, 6.4, 6.6, 6.8, 7.0, 7.2]:
     for seed in range(args.seed_start, int(args.ntrials/args.n_per_batch)+1):
+        if args.no_overwrite==1:
+            import glob
+            already_run = glob.glob(os.path.join(args.outdir,
+                'trials/gw_{:.1f}_mHz_delta_t_{:.1e}_seed_{}.npz'.format(bg_rate, deltaT, seed)))
+            if len(already_run) != 0: continue
         job.add_arg('--deltaT {} --ntrials {} --seed {} --bkg {} --outdir {}'.format(
             deltaT, args.n_per_batch, seed, bg_rate, args.outdir))

diff --git a/fast_response/reports/ReportGenerator.py b/fast_response/reports/ReportGenerator.py
index 80c57a9..722748a 100644
--- a/fast_response/reports/ReportGenerator.py
+++ b/fast_response/reports/ReportGenerator.py
@@ -104,7 +104,10 @@ def query_db_runs(self, time_window):
         # first query the rundata base with the times of the analysis
         # plus 8 hours on either side
         run_url = 'https://live.icecube.wisc.edu/run_info/'
-        run_query = {'user':'icecube', 'pass':'skua',
+        with open('/home/jthwaites/private/tokens/auth.txt') as f:
+            user = f.readline().rstrip('\n')
+            pasw = f.readline().rstrip('\n')
+        run_query = {'user':user, 'pass':pasw,
             'start':(Time(time_window[0],precision=0)
                      - TimeDelta(8*3600,format='sec')).iso,
             'stop': (Time(time_window[1],precision=0)
@@ -345,12 +348,19 @@ def generate_report(self):
             "}\n"
         )

+        if self.source["stop_mjd"] > 60276.86875:
+            muonfilter_str = "MuonFilter_23_plot.png"
+            l2filter_str="OnlineL2Filter_23_plot.png"
+        else:
+            muonfilter_str="MuonFilter_13_plot.png"
+            l2filter_str="OnlineL2Filter_17_plot.png"
+
         for plot_name, plot_path in [("gfurate", "GFU_rate_plot.png"),
                                      ('skymap', self.analysisid + "unblinded_skymap.png"),
                                      ('skymapzoom', self.analysisid + "unblinded_skymap_zoom.png"),
                                      ('limitdNdE', "central_90_dNdE.png"),
-                                     ('muonfilter', "MuonFilter_13_plot.png"),
-                                     ("Lfilter", "OnlineL2Filter_17_plot.png"),
+                                     ('muonfilter', muonfilter_str),
+                                     ("Lfilter", l2filter_str),
                                      ("badnessplot", "badness_plot.png"),
                                      ("multiplicity", "IN_ICE_SIMPLE_MULTIPLICITY_plot.png")]:
             if os.path.isfile(self.dirname + '/' + plot_path):

diff --git a/fast_response/scripts/combine_results_kafka.py b/fast_response/scripts/combine_results_kafka.py
index 5001ef2..25478c1 100644
--- a/fast_response/scripts/combine_results_kafka.py
+++ b/fast_response/scripts/combine_results_kafka.py
@@ -27,7 +27,7 @@
 consumer = Consumer(client_id=client_id,
                     client_secret=client_secret,
-                    config={'max.poll.interval.ms':1200000})
+                    config={'max.poll.interval.ms':1800000})

 # Subscribe to topics to receive alerts
 consumer.subscribe(['gcn.classic.voevent.LVC_PRELIMINARY',
@@ -207,7 +207,7 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
             return

     collected_results = {}
-    collected_results["$schema"]= "https://gcn.nasa.gov/schema/stable/gcn/notices/icecube/LvkNuTrackSearch.schema.json"
+    collected_results["$schema"]= "https://gcn.nasa.gov/schema/v3.0.0/gcn/notices/icecube/lvk_nu_track_search.schema.json"
     collected_results["type"]= "IceCube LVK Alert Nu Track Search"

     eventtime = record.find('.//ISOTime').text
@@ -314,9 +314,10 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
             logger.warning('Both analyses not finished after {:.0f} min wait.'.format(max_wait))
             logger.warning('Not sending GCN.')
             if record.attrib['role']=='observation' and not heartbeat:
+                err_msg = '--missing_llama=True --missing_uml=True' if not subthreshold else '--missing_llama=True'
                 try:
                     subprocess.call(['/home/jthwaites/private/make_call.py',
-                                     '--troubleshoot_gcn=True', '--missing_llama=True', '--missing_uml=True'])
+                                     '--troubleshoot_gcn=True', err_msg])
                 except:
                     logger.warning('Failed to send alert to shifters: Issue finding both results. ')
             return

diff --git a/fast_response/sensitivity_utils.py b/fast_response/sensitivity_utils.py
index 3ce18b1..ebf38c8 100644
--- a/fast_response/sensitivity_utils.py
+++ b/fast_response/sensitivity_utils.py
@@ -4,26 +4,76 @@ from scipy.stats import chi2

 def find_nearest_idx(array, value):
+    '''
+    Find the index where an array is nearest to a
+    given value
+
+    Parameters:
+    -----------
+    array: list or array
+        Array to check
+    value: float
+        Particular value to look for in an array
+
+    Returns:
+    ----------
+    Int: Index where array is closest to value
+
+    See also:
+    ----------
+    find_nearest: gives closest value in array, rather than index
+    '''
     array = np.asarray(array)
     idx = (np.abs(array - value)).argmin()
     return idx

 def find_nearest(array, value):
+    '''
+    Find the value of an array closest to a
+    given value
+
+    Parameters:
+    -----------
+    array: list or array
+        Array to check
+    value: float
+        Particular value to look for in an array
+
+    Returns:
+    ----------
+    Float: value of the array that is closest to the value given
+
+    See also:
+    ----------
+    find_nearest_idx: returns index of closest value in array, rather than value
+    '''
     array = np.asarray(array)
     ind = (np.abs(array - value)).argmin()
     return array[ind]

 def deltaPsi(dec1, ra1, dec2, ra2):
     """
-    Calculate angular distance.
+    Calculate angular distance between two given points.
+    Values in Right Ascension/declination

-    Args:
-    dec1: Declination of first direction in radian
-    ra1: Right ascension of first direction in radian
-    dec2: Declination of second direction in radian
-    ra2: Right ascension of second direction in radian
-
-    Returns angular distance in radian
+    Parameters:
+    -----------
+    dec1: float
+        Declination of first direction in radian
+    ra1: float
+        Right ascension of first direction in radian
+    dec2: float
+        Declination of second direction in radian
+    ra2: float
+        Right ascension of second direction in radian
+
+    Returns:
+    -----------
+    Float: angular distance in radian
+
+    See also:
+    -----------
+    deltaPsi2: helper function called within this function
     """
     return deltaPsi2(np.sin(dec1), np.cos(dec1), np.sin(ra1), np.cos(ra1),
                      np.sin(dec2), np.cos(dec2), np.sin(ra2), np.cos(ra2))
@@ -31,17 +81,33 @@ def deltaPsi2(sDec1, cDec1, sRa1, cRa1, sDec2, cDec2, sRa2, cRa2):
     """
     Calculate angular distance.

-    Args:
-    sDec1: sin(Declination of first direction)
-    cDec1: cos(Declination of first direction)
-    sRa1: sin(Right ascension of first direction)
-    cRa1: cos(Right ascension of first direction)
-    sDec2: sin(Declination of second direction)
-    cDec2: cos(Declination of second direction)
-    sRa2: sin(Right ascension of second direction)
-    cRa2: cos(Right ascension of second direction)
-
-    Returns angular distance in radian
+    Parameters:
+    -----------
+    sDec1: float
+        sin(Declination of first direction)
+    cDec1: float
+        cos(Declination of first direction)
+    sRa1: float
+        sin(Right ascension of first direction)
+    cRa1: float
+        cos(Right ascension of first direction)
+    sDec2: float
+        sin(Declination of second direction)
+    cDec2: float
+        cos(Declination of second direction)
+    sRa2: float
+        sin(Right ascension of second direction)
+    cRa2: float
+        cos(Right ascension of second direction)
+
+    Returns:
+    ---------
+    Float: angular distance in radian
+
+    See also:
+    ----------
+    deltaPsi: wrapper that calls this function.
+    Users should directly call deltaPsi.
     """
     tmp = cDec1*cRa1*cDec2*cRa2 + cDec1*sRa1*cDec2*sRa2 + sDec1*sDec2
     tmp[tmp>1.] = 1.
@@ -51,7 +117,35 @@ def deltaPsi2(sDec1, cDec1, sRa1, cRa1, sDec2, cDec2, sRa2, cRa2):

 def sensitivity_fit(signal_fluxes, passing, errs, fit_func, p0 = None, conf_lev = 0.9):
     r''' Given an array of injected fluxes (or events) and passing fraction
-    relative to the unblinded TS, do a fit given a CDF
+    relative to the unblinded TS, do a fit given a CDF.
+    Built on curve_fit from scipy.optimize.
+
+    Parameters:
+    -----------
+    signal_fluxes: float Numpy array
+        set of injected flux values (or signal events).
+    passing: float Numpy array
+        corresponding fraction of trials with TS > threshold TS value, for each signal flux value.
+    errs: float Numpy array
+        binomial error for the given passing values. See also: binomial_error
+    fit_func: function
+        function to use in the fit.
+        See also: Error function (erfunc), Chi squared (chi2cdf), Incomplete gamma (incomplete_gamma)
+    p0: None, scalar or N-length sequence
+        initial guess for parameters, passed directly to scipy.optimize.curve_fit
+    conf_lev: float
+        confidence level used (default 0.9 = 90% CL)
+
+    Returns:
+    ----------
+    dict of fit values:
+        popt, pcov: returned fit values from scipy.optimize.curve_fit
+        chi2, dof, pval: chi squared, degrees of freedom, and p-value from fit
+        xfit, yfit: arrays of values, using the fitted parameters of the given function
+        name: name of fit used
+        ls: linestyle, returns --
+        sens: calculated sensitivity value, in flux or ns (whichever is passed in signal_fluxes)
+    '''
     try:
         name = fit_func.__name__
@@ -71,22 +165,40 @@ def sensitivity_fit(signal_fluxes, passing, errs, fit_func, p0 = None, conf_lev
             'name': name, 'pval':pval, 'ls':'--', 'sens': sens}

 def erfunc(x, a, b):
+    '''Error function'''
     x = np.array(x)
     return 0.5 + 0.5*sp.special.erf(a*x + b)

 def chi2cdf(x, df1, loc, scale):
+    '''Chi-squared CDF'''
     func = chi2.cdf(x,df1,loc,scale)
     return func

 def incomplete_gamma(x, a, scale):
+    '''Incomplete gamma function'''
     x = np.array(x)
     return sp.special.gammaincc( scale*x, a)

 def poissoncdf(x, mu, loc):
+    '''Poisson CDF function'''
     func = sp.stats.poisson.cdf(x, mu, loc)
     return func

 def binomial_error(p, number):
+    '''
+    Calculates binomial errors for given passing fraction values
+
+    Parameters:
+    -----------
+    p: float Numpy array
+        passing fraction values
+    number: int Numpy array
+        number of signal trials run for each injected value
+
+    Returns:
+    ----------
+    float Numpy array of calculated binomial errors
+    '''
     errs = np.sqrt(p*(1.-p) / number)
     ntrig = p * number
     bound_case_pass = (ntrig + (1./3.)) / (number + (2./3.))
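The sensitivity_fit docstring above fully specifies its inputs and the returned dictionary, so it can be demonstrated end to end on synthetic passing fractions:

import numpy as np
from fast_response import sensitivity_utils

signal_fluxes = np.linspace(0.5, 5., 10)
n_trials = np.full(signal_fluxes.size, 100)
passing = 1. / (1. + np.exp(-2. * (signal_fluxes - 2.)))   # toy S-curve
errs = sensitivity_utils.binomial_error(passing, n_trials)

fit = sensitivity_utils.sensitivity_fit(signal_fluxes, passing, errs,
                                        sensitivity_utils.erfunc, conf_lev=0.9)
print('{}: sens = {:.2f}, chi2/dof = {:.2f}/{}'.format(
    fit['name'], fit['sens'], fit['chi2'], fit['dof']))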
@@ -51,7 +117,35 @@ def deltaPsi2(sDec1, cDec1, sRa1, cRa1, sDec2, cDec2, sRa2, cRa2):
 def sensitivity_fit(signal_fluxes, passing, errs, fit_func, p0 = None, conf_lev = 0.9):
     r'''
     Given an array of injected fluxes (or events) and passing fraction
-    relative to the unblinded TS, do a fit given a CDF
+    relative to the unblinded TS, do a fit given a CDF.
+    Built on curve_fit from scipy.optimize.
+
+    Parameters:
+    -----------
+    signal_fluxes: float Numpy array
+        set of injected flux values (or signal events).
+    passing: float Numpy array
+        corresponding fraction of trials with TS > threshold TS value, for each signal flux value.
+    errs: float Numpy array
+        binomial error for the given passing values. See also: binomial_error
+    fit_func: function
+        function to use in the fit.
+        See also: Error function (erfunc), Chi-squared (chi2cdf), Incomplete gamma (incomplete_gamma)
+    p0: None, scalar or N-length sequence
+        initial guess for parameters, passed directly to scipy.optimize.curve_fit
+    conf_lev: float
+        confidence level used (default 0.9 = 90% CL)
+
+    Returns:
+    ----------
+    dict of fit values:
+        popt, pcov: best-fit parameters and covariance from scipy.optimize.curve_fit
+        chi2, dof, pval: chi-squared, degrees of freedom, and p-value of the fit
+        xfit, yfit: x values and the fitted curve evaluated with the best-fit parameters
+        name: name of the fit function used
+        ls: linestyle for plotting (always '--')
+        sens: calculated sensitivity value, in flux or ns (whichever was passed as signal_fluxes)
+    '''
     try:
         name = fit_func.__name__
@@ -71,22 +165,40 @@ def sensitivity_fit(signal_fluxes, passing, errs, fit_func, p0 = None, conf_lev
             'name': name, 'pval':pval, 'ls':'--', 'sens': sens}
 
 def erfunc(x, a, b):
+    '''Error function CDF shape, used to fit passing fractions'''
     x = np.array(x)
     return 0.5 + 0.5*sp.special.erf(a*x + b)
 
 def chi2cdf(x, df1, loc, scale):
+    '''Chi-squared CDF'''
     func = chi2.cdf(x,df1,loc,scale)
     return func
 
 def incomplete_gamma(x, a, scale):
+    '''Incomplete gamma function (regularized upper, via scipy.special.gammaincc)'''
     x = np.array(x)
     return sp.special.gammaincc( scale*x, a)
 
 def poissoncdf(x, mu, loc):
+    '''Poisson CDF'''
     func = sp.stats.poisson.cdf(x, mu, loc)
     return func
 
 def binomial_error(p, number):
+    '''
+    Calculates binomial errors for the given passing-fraction values
+
+    Parameters:
+    -----------
+    p: float Numpy array
+        passing fraction values
+    number: int Numpy array
+        number of signal trials run for each injected value
+
+    Returns:
+    ----------
+    float Numpy array of calculated binomial errors
+    '''
     errs = np.sqrt(p*(1.-p) / number)
     ntrig = p * number
     bound_case_pass = (ntrig + (1./3.)) / (number + (2./3.))
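A rough end-to-end sketch of how these utilities fit together, on a toy passing-fraction curve (all numbers here are invented; in a real analysis the curve comes from signal-injection trials):

    import numpy as np
    from fast_response import sensitivity_utils

    # toy curve: fraction of signal trials with TS above the unblinded threshold
    ns_inj = np.linspace(0.5, 10., 20)
    passing = sensitivity_utils.erfunc(ns_inj, 0.5, -1.)   # idealized, noise-free
    errs = sensitivity_utils.binomial_error(passing, 200 * np.ones_like(ns_inj))

    fit = sensitivity_utils.sensitivity_fit(
        ns_inj, passing, errs, sensitivity_utils.erfunc,
        p0=[0.1, -0.5], conf_lev=0.9)
    print(fit['sens'], fit['pval'])  # sensitivity (in ns here) and fit p-value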
diff --git a/fast_response/web_utils.py b/fast_response/web_utils.py
index 1325d76..ddfa185 100644
--- a/fast_response/web_utils.py
+++ b/fast_response/web_utils.py
@@ -34,16 +34,20 @@ mpl.rcParams['ytick.labelsize'] = 16
 mpl.rcParams['xtick.major.size'] = 5
 mpl.rcParams['ytick.major.size'] = 5
 
-############################# Plotting Parameters #############################
+###############################################################################
 
 def updateFastResponseWeb(analysis, gw=False):
     r'''
     Create analysis specific page, and update
     plots with information from all analyses
 
+    Parameters:
     -----------
-    analysis: FastResponseAnalysis instance
+    analysis: dict
+        Dictionary of FastResponseAnalysis results
+    gw: bool
+        Indicate if the event is a GWFollowup (uses the GW-specific results page and dataframe). Default False
     '''
     updateDataFrame(analysis, gw=gw)
     createFastResponsePage(analysis, gw=gw)
@@ -52,14 +56,21 @@
     else:
         updateFastResponseTable(analysis)
     updateFastResponsePlots(gw=gw)
-    # sync_to_roc()
 
 def updateDataFrame(analysis, gw=False, make_df=False):
     r'''
     Read in official Fast Response Dataframe,
-    add these results, save
-    gw option specifies to read in GW DataFrame
-    make_df allows to make a new dataframe with specific keys in same format
+    add these results, save the DataFrame
+
+    Parameters:
+    -----------
+    analysis: dict
+        Dictionary of FastResponseAnalysis results
+    gw: bool
+        Indicate if event is a GWFollowup (uses GW-specific dataframe). Default False
+    make_df: bool
+        Make a new dataframe from scratch. Default False
+        NOTE: this will overwrite any existing DataFrame at the expected path.
     '''
     base_path = os.path.dirname(fast_response.__file__)
     if make_df:
@@ -82,11 +93,9 @@
     dec = np.nan if 'dec' not in analysis.keys() else analysis['dec'] * 180. / np.pi
     ra = np.nan if 'ra' not in analysis.keys() else analysis['ra'] * 180. / np.pi
     ext = 0.0 if 'extension' not in analysis.keys() else analysis['extension'] * 180. / np.pi
-    upper_lim = np.nan if 'upper_limit' not in analysis.keys() else analysis['upper_limit']
-    #note that this upper_lim will be np.nan for any skymap followup, because then we send out a sens_range
-
-    #low_en = np.nan if 'low_en' not in analysis.keys() else analysis['low_en']
-    #high_en = np.nan if 'high_en' not in analysis.keys() else analysis['high_en']
+
+    # NOTE: this upper_lim will be np.nan for any skymap followup, because then we send out a sens_range
+    upper_lim = np.nan if 'upper_limit' not in analysis.keys() else analysis['upper_limit']
 
     if gw:
         new_list = [analysis['p'], pd.Timestamp(Time(analysis['start'], format='mjd').iso),
@@ -94,6 +103,7 @@
                     analysis['ns'], analysis['ts'], analysis['gamma'],
                     analysis['fit_ra'], analysis['fit_dec'], analysis['energy_range'], analysis['sens_range']]
     else:
+        if 'energy_range' not in analysis.keys(): analysis['energy_range'] = (None, None)
         new_list = [ra, dec, analysis['p'], analysis['ns'],
                     pd.Timestamp(Time(analysis['start'], format='mjd').iso),
                     pd.Timedelta(analysis['stop'] - analysis['start'], unit='day'),
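Putting the functions above together, a sketch of the typical entry point (the pickle path is hypothetical; the results dictionary is the one a FastResponseAnalysis run saves to disk):

    import pickle
    from fast_response import web_utils

    # hypothetical path to the saved results of a FastResponseAnalysis run
    with open('/path/to/analysisid/results.pickle', 'rb') as f:
        results = pickle.load(f)

    # updates the event page, the summary table, and the overview plots
    web_utils.updateFastResponseWeb(results, gw=False)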
@@ -111,10 +121,13 @@
 def createFastResponsePage(analysis, gw=False):
     r'''
     Create analysis specific page
 
+    Parameters:
     -----------
-    analysis: FastResponseAnalysis results from pickle file
-    gw: specifier if analysis is for a GW followup
+    analysis: dict
+        Dictionary of FastResponseAnalysis results
+    gw: bool
+        Indicate if event is a GWFollowup (uses GW-specific webpage). Default False
     '''
     new_f = []
     keypairs = [('ANALYSISTS', 'ts'), ('ANALYSISNS', 'ns'), ('ANALYSISP', 'p')]
@@ -127,6 +140,15 @@
             if k in line:
                 line = line.replace(k, '{:.3f}'.format(analysis[r]))
         new_f.append(line)
+
+    # Filter names changed at the 2023 run start (MJD 60276.86875)
+    if analysis["stop"] > 60276.86875:
+        muonfilter_str = "MuonFilter_23_plot.png"
+        l2filter_str = "OnlineL2Filter_23_plot.png"
+    else:
+        muonfilter_str = "MuonFilter_13_plot.png"
+        l2filter_str = "OnlineL2Filter_17_plot.png"
+
     for i in range(len(new_f)):
         if 'ANALYSISCREATIONTIME' in new_f[i]:
             new_f[i] = new_f[i].replace('ANALYSISCREATIONTIME', str(datetime.datetime.utcnow())[:-7])
@@ -150,6 +172,11 @@
         if 'ANALYSISID' in new_f[i]:
             new_f[i] = new_f[i].replace('ANALYSISID', analysis['analysisid'])
         if 'ANALYZER' in new_f[i]:
             new_f[i] = new_f[i].replace('ANALYZER', username)
+        if 'MUONFILTER' in new_f[i]:
+            new_f[i] = new_f[i].replace('MUONFILTER', muonfilter_str)
+        if 'L2FILTER' in new_f[i]:
+            new_f[i] = new_f[i].replace('L2FILTER', l2filter_str)
+
         if gw:
             if 'webpage' in new_f[i]:
                 new_f[i] = new_f[i].replace('webpage', 'gw-webpage')
@@ -168,11 +195,18 @@
 def updateFastResponseTable(analysis):
     r'''
-    Push information from this analysis to overall
+    Push information from this analysis to summary
     webpage tables, include new entries where applicable
+    This uses the general FastResponse webpage (not GW)
+
+    Parameters:
     -----------
-    analysis: FastResponseAnalysis results from pickle file
+    analysis: dict
+        Dictionary of FastResponseAnalysis results
+
+    See also:
+    -----------
+    updateGWTable: GWFollowup equivalent routine
     '''
     dec = '-' if 'dec' not in analysis.keys() else '{:+.2f}'.format(analysis['dec'] * 180. / np.pi)
     ra = '-' if 'ra' not in analysis.keys() else '{:.2f}'.format(analysis['ra'] * 180. / np.pi)
@@ -206,11 +240,18 @@
 def updateGWTable(analysis):
     r'''
-    Push information from this analysis to summary tables
-    on GW followup webpage
+    Push information from this analysis to summary
+    GWFollowup webpage tables, include new entries where applicable
+    This uses the GWFollowup webpage
+
+    Parameters:
     -----------
-    analysis: GW FastResponseAnalysis results from pickle file
+    analysis: dict
+        Dictionary of FastResponseAnalysis results for a GWFollowup
+
+    See also:
+    -----------
+    updateFastResponseTable: Alert or External Followup equivalent routine
     '''
     dec = '-' if analysis['ts']<=0. else '{:+.2f}'.format(analysis['fit_dec'] * 180. / np.pi)
     ra = '-' if analysis['ts']<=0. else '{:.2f}'.format(analysis['fit_ra'] * 180. / np.pi)
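The MJD cutoff used for the filter names in createFastResponsePage above can be sanity-checked with astropy:

    from astropy.time import Time

    # cutoff for the 2023 filter renaming, per the comment in createFastResponsePage
    print(Time(60276.86875, format='mjd').iso)  # 2023-11-28 20:51:00.000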
@@ -249,8 +290,13 @@
 def updateFastResponsePlots(gw=False):
     r'''
-    Update overview plots of all analyses (timing,
-    p-value distribution, etc.)
+    Update the overview p-value distribution for all analyses
+
+    Parameters:
+    -----------
+    gw: bool
+        Indicate if event is a GWFollowup (makes the p-value distribution for GW followups only).
+        Default False.
     '''
     base_path = os.path.dirname(fast_response.__file__)
 
@@ -292,13 +337,21 @@ def updateGW_public(analysis, circular = None):
     r'''
     Push information from this analysis to summary tables
     on GW PUBLIC followup webpage
 
+    Parameters:
     -----------
-    analysis: dictionary of results from UML and LLAMA analyses
-    circular: GCN circular number, to link to circular rather than notice
+    analysis: dict
+        Dictionary of results from UML and LLAMA analyses
+        (should be read from the GCN Notice sent with results of both)
+    circular: int
+        GCN circular number, which links to the circular rather than the notice
+
+    See also:
+    -----------
+    createGWEventPage: creates event-specific webpage, if neutrinos are sent
+    update_internal_public: equivalent routine for alert event followup
     '''
-    #duration = (Time(analysis['observation_stop'], format='iso').mjd - Time(analysis['observation_start'],format = 'iso').mjd)*86400.
     duration = analysis['observation_livetime']
     if 'analysisid' not in analysis.keys():
         start_date = Time(dateutil.parser.parse(analysis['observation_start'])).datetime
@@ -373,10 +426,14 @@ def createGWEventPage(analysis):
     r'''
-    Create PUBLIC webpage with neutrino information, if sent via GCN
+    Create event-specific PUBLIC webpage with neutrino information,
+    for a GW where neutrino information is sent over GCN (p<10%)
+
+    Parameters:
     -----------
-    analysis: dictionary of results from UML and LLAMA analyses
+    analysis: dict
+        Dictionary of results from UML and LLAMA analyses
+        (should be read from the GCN Notice sent with results of both)
     '''
     new_f = []
     base_path = os.path.dirname(fast_response.__file__)
@@ -404,15 +461,11 @@ def createGWEventPage(analysis):
                 new_f[i] = new_f[i].replace('UMLPVAL', '{}'.format(analysis['pval_generic']))
             except:
                 new_f[i] = new_f[i].replace('UMLPVAL', 'nan')
-        #if 'UMLSIG' in new_f[i]:
-        #    new_f[i] = new_f[i].replace('UMLSIG', '{:.2f}'.format(analysis['overall_sig_gen_transient']))
         if 'LLAMAPVAL' in new_f[i]:
             try:
                 new_f[i] = new_f[i].replace('LLAMAPVAL', '{}'.format(analysis['pval_bayesian']))
             except:
                 new_f[i] = new_f[i].replace('LLAMAPVAL', 'nan')
-        #if 'LLAMASIG' in new_f[i]:
-        #    new_f[i] = new_f[i].replace('LLAMASIG', '{:.2f}'.format(analysis['overall_sig_bayesian']))
         if 'MOSTPROBDIR' in new_f[i]:
             if 'most_probable_direction' in analysis.keys():
                 new_f[i] = new_f[i].replace('MOSTPROBDIR', 'RA: {} deg, decl.: {} deg'.format(
@@ -464,12 +517,22 @@ def update_internal_public(analysis_1000, analysis_2d, alert_gcn=None, fra_circular=None):
     r'''
     Push information from this analysis to summary tables
-    on PUBLIC FRA followup webpage
+    on PUBLIC Alert Followup webpage
+
+    Parameters:
     -----------
-    analysis: dictionary of results
-    alert_gcn: either a single int (Circular number) or tuple (run id, event id) if linking to notice
-    fra_circular: GCN circular number for FRA followup
+    analysis_1000: dict
+        FastResponseAnalysis results for the 1000 sec time window
+    analysis_2d: dict
+        FastResponseAnalysis results for the 2 day time window
+    alert_gcn: int or tuple of ints
+        Either a single int (circular number) or a tuple (run id, event id) if linking to the notice
+    fra_circular: int
+        GCN circular number sent for the FRA followup
+
+    See also:
+    -----------
+    updateGW_public: equivalent routine for GWFollowup
     '''
     event_name = analysis_2d['name'].replace(' 1.7e+05 s','')
     cascade=True if 'Cascade' in event_name else False
@@ -542,15 +605,19 @@
         else:
             f.write(line)
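A usage sketch for the public alert-followup page; every identifier and path below is a hypothetical placeholder, not a real event:

    import pickle
    from fast_response import web_utils

    # hypothetical: saved results for the two alert-followup time windows
    with open('/path/to/results_1000s.pickle', 'rb') as f:
        results_1000s = pickle.load(f)
    with open('/path/to/results_2d.pickle', 'rb') as f:
        results_2d = pickle.load(f)

    # a (run id, event id) tuple links to the GCN notice; a single int would link to a circular
    web_utils.update_internal_public(results_1000s, results_2d,
                                     alert_gcn=(138599, 1), fra_circular=None)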
 
-#def sync_to_roc():
-#    #subprocess.Popen('rsync -a /home/apizzuto/public_html/FastResponse/webpage/ apizzuto@roc.icecube.wisc.edu:/mnt/roc/www/internal/fast_response')
-#    env = dict(os.environ)
-#    subprocess.call(['rsync', '-a', f'/home/{username}/public_html/FastResponse/webpage/',
-#        f'{username}@roc.icecube.wisc.edu:/mnt/roc/www/internal/fast_response'],
-#        env = env
-#    )
-
 def format_dec_str(dec):
+    '''
+    Add a + sign before the declination value if dec >= 0
+
+    Parameters:
+    -----------
+    dec: float
+        Declination value
+
+    Returns:
+    -----------
+    str: declination formatted with an explicit sign
+    '''
     if dec < 0:
         return str(dec)
     else:
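Expected behavior of format_dec_str (the positive branch is cut off by the hunk context above; based on the docstring it presumably prepends a '+'):

    from fast_response.web_utils import format_dec_str

    print(format_dec_str(-12.0))  # '-12.0'
    print(format_dec_str(23.5))   # presumably '+23.5'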

diff --git a/html/analysis_base.html b/html/analysis_base.html
index 44253cf..f078078 100644
--- a/html/analysis_base.html
+++ b/html/analysis_base.html
@@ -44,10 +44,10 @@
 
   Detector Status Plots
 
-  [<img> line dropped in extraction: hardcoded MuonFilter status-plot filename]
+  [<img> line dropped in extraction: presumably the MUONFILTER placeholder that createFastResponsePage now fills in]
-  [<img> line dropped in extraction: hardcoded OnlineL2Filter status-plot filename]
+  [<img> line dropped in extraction: presumably the L2FILTER placeholder]
diff --git a/setup.py b/setup.py
index 88a9ff8..a77be8f 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
 import setuptools
 
 long_message = 'Fast Response Analysis'
-version = "1.1.4"
+version = "1.2.0"
 
 setuptools.setup(
     name="fast_response",