Skip to content

Commit

Permalink
O4 realtime (#48)
Browse files Browse the repository at this point in the history
add fixes for beginning of o4 run
  • Loading branch information
jessiethw authored Jun 15, 2023
1 parent d0b67d5 commit 39db75d
Show file tree
Hide file tree
Showing 20 changed files with 1,005 additions and 439 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,4 @@ fast_response/results_dataframe_bkup.pkl
fast_response/scripts/make_call.py
cascade_skymaps/
*.png
fast_response/test_plotting*
4 changes: 2 additions & 2 deletions fast_response/AlertFollowup.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,9 +134,9 @@ def write_circular(self, alert_results):
analysis_1000['gcn_num']=str(analysis_1000['gcn_num'])

if len(analysis_1000['gcn_num'])>6:
gcn_link='notices_amon_icecube_cascade/'+analysis_1000['gcn_num']+'.amon'
gcn_link='https://gcn.gsfc.nasa.gov/notices_amon_icecube_cascade/'+analysis_1000['gcn_num']+'.amon'
else:
gcn_link= 'gcn3/'+ analysis_1000['gcn_num'] +'.gcn3'
gcn_link= 'https://gcn.nasa.gov/circulars/'+ analysis_1000['gcn_num']

if 'coincident_events' not in analysis_1000.keys():
analysis_1000['coincident_events'] = []
Expand Down
28 changes: 24 additions & 4 deletions fast_response/FastResponseAnalysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import matplotlib.pyplot as plt
from astropy.time import Time
from scipy.special import erfinv
from matplotlib.lines import Line2D

from skylab.datasets import Datasets
from skylab.llh_models import EnergyLLH
Expand Down Expand Up @@ -443,8 +444,11 @@ def plot_ontime(self, with_contour=False, contour_files=None):
prior or a zoomed in version like traditional fast response
followups
'''
self.plot_skymap_zoom(with_contour=with_contour, contour_files=contour_files)
self.plot_skymap(with_contour=with_contour, contour_files=contour_files)
try:
self.plot_skymap_zoom(with_contour=with_contour, contour_files=contour_files)
except:
print('Failed to make skymap zoom plot')
self.plot_skymap(with_contour=with_contour, contour_files=contour_files)

def plot_skymap_zoom(self, with_contour=False, contour_files=None):
r'''Make a zoomed in portion of a skymap with
Expand Down Expand Up @@ -557,8 +561,20 @@ def plot_skymap(self, with_contour=False, contour_files=None):
else:
skymap = self.skymap
max_val = max(skymap)
min_val = min(skymap)
moll_cbar = True if self.skymap is not None else None
hp.mollview(skymap, coord='C', cmap=cmap, rot=180, cbar=moll_cbar)

try:
hp.mollview(skymap, coord='C', cmap=cmap, rot=180, cbar=moll_cbar)
except Exception as e:
if min_val<1.0e-16:
#for some reason, sometimes have an underflow issue here
skymap[skymap<1.0e-16] = 0.
else:
print(e)
print('Failed to make all-sky unblinded skymap. Retry making full skymap! ')
return

plt.text(2.0,0., r"$0^\circ$", ha="left", va="center")
plt.text(1.9,0.45, r"$30^\circ$", ha="left", va="center")
plt.text(1.4,0.8, r"$60^\circ$", ha="left", va="center")
Expand All @@ -581,7 +597,9 @@ def plot_skymap(self, with_contour=False, contour_files=None):
sigma_90 = events['sigma']*self._angScale

# plot events on sky with error contours
handles=[]
hp.projscatter(theta,phi,c=cols,marker='x',label='GFU Event',coord='C', zorder=5)
handles.append(Line2D([0], [0], marker='x', ls='None', label='GFU Event'))

if (self.stop - self.start) <= 0.5: #Only plot contours if less than 2 days
for i in range(events['ra'].size):
Expand All @@ -595,6 +613,7 @@ def plot_skymap(self, with_contour=False, contour_files=None):
src_phi = self.ra
hp.projscatter(src_theta, src_phi, c = 'k', marker = '*',
label = self.name, coord='C', s=350)
handles.append(Line2D([0], [0], marker='*', c='k', ls='None', label=self.name))

if contour_files is not None:
cont_ls = ['solid', 'dashed']
Expand All @@ -614,12 +633,13 @@ def plot_skymap(self, with_contour=False, contour_files=None):
levels = [0.9]
theta, phi = plotting_utils.plot_contours(levels, probs)
hp.projplot(theta[0], phi[0], linewidth=2., c='k', label='Skymap (90\% cont.)')
handles.append(Line2D([0], [0], lw=2, c='k', label=r"Skymap (90\% cont.)"))
for i in range(1, len(theta)):
hp.projplot(theta[i], phi[i], linewidth=2., c='k')

# plt.title('Fast Response Skymap')
plt.title(self.name.replace('_', ' '))
plt.legend(loc=1)
plt.legend(loc=1, handles=handles)
plt.savefig(self.analysispath + '/' + self.analysisid + 'unblinded_skymap.png',bbox_inches='tight')
plt.close()

Expand Down
54 changes: 48 additions & 6 deletions fast_response/GWFollowup.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

from .FastResponseAnalysis import PriorFollowup
from .reports import GravitationalWaveReport
from skylab.datasets import Datasets
from skylab.llh_models import EnergyLLH
from skylab.spectral_models import PowerLaw
from skylab.temporal_models import BoxProfile, TemporalModel
Expand Down Expand Up @@ -106,20 +107,54 @@ def run_background_trials(self, month=None, ntrials=1000):
self.tsd = max_ts
return max_ts

def check_events_after(self):
    '''Check for events arriving after the on-time window.

    This is to make sure we have all data from i3Live before running:
    if no event later than ``self.stop`` has reached i3Live yet, the
    on-time sample may still be incomplete and should be re-loaded.

    Returns a bool:
        check_passed = True if there is at least 1 event after the time window
        check_passed = False if there are no events after (should re-load!)
    '''
    # Current wall-clock time in MJD; also serves as the query end time.
    t1 = Time(datetime.datetime.utcnow()).mjd
    # (t1 - stop) is in days; *86400 converts to seconds.
    if ((t1-self.stop)*86400.)>1000.:
        #if it's been long enough, only load 1000s
        print('Loading 1000s of data after the time window')
        t1 = self.stop + 1000./86400.
    # Query the i3Live livestream from the analysis start through t1.
    # NOTE(review): this block was reconstructed from a diff with stripped
    # indentation — confirm against upstream that the livestream call sits
    # outside the if-block and the print sits inside the size check.
    exp_long, livetime_long, grl_long = self.dset.livestream(
        self.start,
        t1,
        load_mc=False,
        floor=self._floor
    )

    # Keep only events strictly after the end of the on-time window.
    mask = (exp_long['time']>self.stop)

    check_passed = False
    if exp_long[mask].size > 0:
        check_passed = True
        print('Found {} events after end of time window'.format(exp_long[mask].size))
    return check_passed

def initialize_llh(self, skipped=None, scramble=False):
'''
Grab data and format it all into a skylab llh object
This function is very similar to the one in FastResponseAnalysis.py,
with the main difference being that these are low-latency enough that we may
have to wait for the last min of data to reach i3live.
Initialize LLH, load ontime data, then add temporal info and ontime data to llh
if so, initialize LLH, load ontime data, then add temporal info and ontime data to llh
'''
t0 = Time(datetime.datetime.utcnow()).mjd

if self.stop > t0 + 60./86400.:
self.get_data(livestream_start=self.start-6., livestream_stop=self.start)
print('Loading off-time data')
elif self.exp is None:
dset = Datasets[self.dataset]
self.dset = dset
check_passed = False
print('Checking for events after time window')
while not check_passed:
check_passed = self.check_events_after()

self.get_data()

if self._verbose:
Expand Down Expand Up @@ -165,9 +200,14 @@ def initialize_llh(self, skipped=None, scramble=False):
seed=self.llh_seed)

if self.stop > t0 + 60./86400.:
check_passed = False
print('Checking for events after time window')
while not check_passed:
check_passed = self.check_events_after()

exp_on, livetime_on, grl_on = self.dset.livestream(
self.start,
self.stop+60./86400.,
self.stop,
load_mc=False,
floor=self._floor,
wait_until_stop=True)
Expand Down Expand Up @@ -292,7 +332,8 @@ def write_circular(self):
else:
significance = '{:1.2f}'.format(self.significance(pvalue))

info = ' <dt> <ra> <dec> <angErr> <p_gwava> <p_llama>\n'
#info = ' <dt> <ra> <dec> <angErr> <p_gwava> <p_llama>\n'
info = ' <dt>\t\t <ra>\t\t <dec>\t\t <angErr>\t\t\t\t <p_gwava>\t\t\t\t\t <p_llama>\n'
table = ''
n_coincident_events=0
for event in events:
Expand Down Expand Up @@ -431,10 +472,11 @@ def find_coincident_events(self):
exp_theta = 0.5*np.pi - self.llh.exp['dec']
exp_phi = self.llh.exp['ra']
exp_pix = hp.ang2pix(self.nside, exp_theta, exp_phi)
overlap = np.isin(exp_pix, self.ipix_90)

t_mask=(self.llh.exp['time'] <= self.stop) & (self.llh.exp['time'] >= self.start)
events = self.llh.exp[t_mask]
ontime_pix = hp.ang2pix(self.nside, 0.5*np.pi - events['dec'], events['ra'])
overlap = np.isin(ontime_pix, self.ipix_90)

events = append_fields(
events, names=['in_contour', 'ts', 'ns', 'gamma', 'B'],
Expand Down Expand Up @@ -606,8 +648,8 @@ def generate_report(self):
r'''Generates report using class attributes
and the ReportGenerator Class'''
report = GravitationalWaveReport(self)
if self.duration > 1.:
report._figure_list = [('decPDF', 'decPDF.png'),('ts_contours', 'ts_contours.png')]
#if self.duration > 1.:
# report._figure_list = [('decPDF', 'decPDF.png'),('ts_contours', 'ts_contours.png')]

report.generate_report()
report.make_pdf()
66 changes: 41 additions & 25 deletions fast_response/MonitoringAndMocks/Data_Display.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
matplotlib.rcParams['ytick.labelsize'] = 16
import matplotlib.pyplot as plt
import numpy as np
import pickle, glob, os, datetime
import pickle, glob, os, datetime, pwd
from datetime import date
from statistics import median
#from statistics import mode
Expand All @@ -18,20 +18,25 @@
#from IPython.display import HTML
#from IPython.display import display
import subprocess
import warnings
warnings.filterwarnings('ignore', module='astropy._erfa')

def dial_up(who="jessie"):
    """Place a troubleshooting phone call to the named on-call person.

    Runs the ``make_call.py`` helper (distributed via cvmfs) in a
    subprocess with ``--<who>=True`` and ``--troubleshoot=True`` flags.
    """
    script_dir = "/cvmfs/icecube.opensciencegrid.org/users/jthwaites/"
    # alternate error-call file, currently unused:
    # "https://icecube.wisc.edu/~jthwaites/FastResponse/error_call.xml"
    cmd = [script_dir + "make_call.py", f"--{who}=True", '--troubleshoot=True']
    subprocess.call(cmd)

#path = "/data/user/mromfoe/software/fastresponse/output/"
path = "/data/user/jthwaites/FastResponseAnalysis/output/"

#path = "/data/user/jthwaites/FastResponseAnalysis/output/"
path=os.environ.get('FAST_RESPONSE_OUTPUT')
out_put = '/data/user/jthwaites/o4-mocks/'

#Creating readable (and callable) files from ALL pickle files previously created in gw_gcn_listener
Pickle_to_text = sorted(glob.glob(path+'PickledMocks/*MS*.pickle'))
if pwd.getpwuid(os.getuid())[0] == 'realtime':
Pickle_to_text = sorted(glob.glob(path+'PickledMocks/*MS*.pickle')+
glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*MS*.pickle'))
else:
Pickle_to_text = sorted(glob.glob(path+'PickledMocks/*MS*.pickle'))

all_dictionary = {'Trigger_Time': [], 'GCN_Alert': [], 'End_Time': [],
'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
Expand Down Expand Up @@ -66,7 +71,12 @@ def dial_up(who="jessie"):
# print("Uh oh... spaghetti-o's")

#Sorting pickle files by date created and by most correct version
Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle'))
if pwd.getpwuid(os.getuid())[0] == 'realtime':
Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle')+
glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*.pickle'))
else:
Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle'))
#Pickle_Files = sorted(glob.glob(path+'PickledMocks/*.pickle'))

date_format = "%Y-%m-%d"

Expand All @@ -81,8 +91,13 @@ def dial_up(who="jessie"):
#Collecting the first maps created for each event for analysis
First_Batch={'Trigger_Time': [], 'GCN_Alert': [], 'End_Time': [],
'LVK_Latency': [], 'IceCube_Latency': [], 'Total_Latency': [],
'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}
First_Runs= sorted(glob.glob((path+'PickledMocks/*MS*-1-*.pickle')))
'We_had_to_wait:': [], 'Name': [], 'Time_Stamp': []}
if pwd.getpwuid(os.getuid())[0] == 'realtime':
First_Runs = sorted(glob.glob(path+'PickledMocks/*MS*-1-*.pickle')+
glob.glob('/data/user/jthwaites/FastResponseAnalysis/output/PickledMocks/*MS*-1-*.pickle'))
else:
First_Runs = sorted(glob.glob(path+'PickledMocks/*MS*-1-*.pickle'))
#First_Runs= sorted(glob.glob((path+'PickledMocks/*MS*-1-*.pickle')))

for file in First_Runs:
if os.path.exists(file):
Expand Down Expand Up @@ -356,27 +371,27 @@ def outlier_name(outlier_list, outlier_names):
fig.savefig("LVK_Latency.png")
plt.close()

from PIL import Image, ImageDraw, ImageFont

img = Image.new('RGB', (370, 20), "white")

d1 = ImageDraw.Draw(img)

#fontname = "/data/user/mromfoe/software/fastresponse/fast_response/MonitoringAndMocks/A101HLVN.ttf"
fontname = "/home/mromfoe/public_html/O4_followup_monitoring/A101HLVN.ttf"
fontsize = 16

MyFont = ImageFont.truetype(fontname, fontsize)

#changing to matplotlib, to avoid more imports
#from PIL import Image, ImageDraw, ImageFont
#img = Image.new('RGB', (370, 20), "white")
#d1 = ImageDraw.Draw(img)
#fontname = "/home/mromfoe/public_html/O4_followup_monitoring/A101HLVN.ttf"
#fontsize = 16
#MyFont = ImageFont.truetype(fontname, fontsize)
now = Time(datetime.datetime.utcnow()).iso

d1.text((2, 0), "Page Last Updated: {} UTC".format(now),
fill = (0, 0, 0), font = MyFont)

#d1.text((2, 0), "Page Last Updated: {} UTC".format(now),
# fill = (0, 0, 0), font = MyFont)
save_path='/home/mromfoe/public_html/O4_followup_monitoring/Update_Time.png'
#img.save(save_path)
#img.save("Update_Time.png")

fig = plt.figure(figsize=(3.70, .20))
ax = fig.add_axes([0, 0, 1, 1])
plt.plot([0,5],[0,100],color='white')
ax.text(-0.1, 15, "Page Last Updated: {} UTC".format(now))
plt.savefig(save_path)

img.save(save_path)
img.save("Update_Time.png")
###
fig, axs = plt.subplots(figsize = (10, 7))
plt.xlabel("Latency in Seconds")
Expand Down Expand Up @@ -643,6 +658,7 @@ def outlier_name(outlier_list, outlier_names):
plt.title("Reports per Day")
plt.xlabel("Date")
plt.ylabel("Number of Reports")
plt.ylim([0,70])

save_path='/home/mromfoe/public_html/O4_followup_monitoring/ReportsPerDay_liveupdate.png'

Expand Down
5 changes: 4 additions & 1 deletion fast_response/MonitoringAndMocks/run_continuous.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,8 @@ do
echo $(date -u) Starting to update plots
python Data_Display.py
echo $(date -u) Plots Updated OK
sleep 3600

current=$(date -d $(date -d "today" +%H:%M) +%s)
target=$(date -d $(date -d "$(date -d "today + 1 hour" +%H):30" +%H:%M) +%s)
sleep $(( $target-$current ))
done
Loading

0 comments on commit 39db75d

Please sign in to comment.