Skip to content

Commit

Permalink
O4 realtime (#49)
Browse files Browse the repository at this point in the history
* add slackbot to post link when done

* add heartbeat option and post gcn to slack

* add link to gcn docs

* labeling events for easy pval comp

* add bartos et al

* round duration

* correct event name for cascades

* run scrambled on subthresh, post reports

* multiple channels for gcn if high significance

* updating public webpages

* document if on realtime

* don't label events if not 1000s tw

* look in albert home while lvk_dropbox is down

* new script to convert multiorder maps

* pointing back to dropbox

* nside checks, 512 for 1000s followup

* nside arg for moc conversion script

* public webpage labels for sig and subthr

* return if real event in heartbeat mode
  • Loading branch information
jessiethw authored Jul 3, 2023
1 parent fba3b77 commit 08d22c6
Show file tree
Hide file tree
Showing 11 changed files with 304 additions and 110 deletions.
18 changes: 11 additions & 7 deletions fast_response/FastResponseAnalysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -439,16 +439,17 @@ def save_results(self, alt_path=None):
print("Results successfully saved")
return self.save_items

def plot_ontime(self, with_contour=False, contour_files=None):
def plot_ontime(self, with_contour=False, contour_files=None, label_events=False):
r'''Plots ontime events with either the full sky spatial
prior or a zoomed in version like traditional fast response
followups
label_events: adds a number label to events on skymap
'''
try:
self.plot_skymap_zoom(with_contour=with_contour, contour_files=contour_files)
except:
print('Failed to make skymap zoom plot')
self.plot_skymap(with_contour=with_contour, contour_files=contour_files)
self.plot_skymap(with_contour=with_contour, contour_files=contour_files, label_events=label_events)

def plot_skymap_zoom(self, with_contour=False, contour_files=None):
r'''Make a zoomed in portion of a skymap with
Expand Down Expand Up @@ -516,7 +517,7 @@ def plot_skymap_zoom(self, with_contour=False, contour_files=None):
contour_counter += 1

if with_contour:
probs = hp.pixelfunc.ud_grade(self.skymap, 64)
probs = hp.pixelfunc.ud_grade(self.skymap, 64, power=-2)
probs = probs/np.sum(probs)
### plot 90% containment contour of PDF
levels = [0.9]
Expand All @@ -534,7 +535,7 @@ def plot_skymap_zoom(self, with_contour=False, contour_files=None):
plt.savefig(self.analysispath + '/' + self.analysisid + 'unblinded_skymap_zoom.pdf',bbox_inches='tight', dpi=300)
plt.close()

def plot_skymap(self, with_contour=False, contour_files=None):
def plot_skymap(self, with_contour=False, contour_files=None, label_events=False):
r''' Make skymap with event localization and all
neutrino events on the sky within the given time window
'''
Expand Down Expand Up @@ -599,6 +600,9 @@ def plot_skymap(self, with_contour=False, contour_files=None):
# plot events on sky with error contours
handles=[]
hp.projscatter(theta,phi,c=cols,marker='x',label='GFU Event',coord='C', zorder=5)
if label_events:
for j in range(len(theta)):
hp.projtext(theta[j], phi[j]-0.11, '{}'.format(j), color='red', fontsize=18, zorder=6)
handles.append(Line2D([0], [0], marker='x', ls='None', label='GFU Event'))

if (self.stop - self.start) <= 0.5: #Only plot contours if less than 2 days
Expand Down Expand Up @@ -627,7 +631,7 @@ def plot_skymap(self, with_contour=False, contour_files=None):
contour_counter += 1

if with_contour and self.skymap is not None:
probs = hp.pixelfunc.ud_grade(self.skymap, 64)
probs = hp.pixelfunc.ud_grade(self.skymap, 64, power=-2)
probs = probs/np.sum(probs)
### plot 90% containment contour of PDF
levels = [0.9]
Expand Down Expand Up @@ -684,7 +688,7 @@ def __init__(self, name, skymap_path, tstart, tstop, skipped=None, seed=None,

super().__init__(name, tstart, tstop, skipped=skipped, seed=seed,
outdir=outdir, save=save, extension=extension)

self.skymap_path = skymap_path
try:
skymap, skymap_header = hp.read_map(skymap_path, h=True, verbose=False)
Expand Down Expand Up @@ -714,7 +718,7 @@ def __str__(self):

def format_skymap(self, skymap):
if hp.pixelfunc.get_nside(skymap) != self._nside:
skymap = hp.pixelfunc.ud_grade(skymap, self._nside)
skymap = hp.pixelfunc.ud_grade(skymap, self._nside, power=-2)
skymap = skymap/skymap.sum()
return skymap

Expand Down
27 changes: 24 additions & 3 deletions fast_response/GWFollowup.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
mpl.use('agg')
import matplotlib.pyplot as plt
import pickle
from astropy.time import Time

from .FastResponseAnalysis import PriorFollowup
from .reports import GravitationalWaveReport
Expand All @@ -28,10 +29,21 @@ class GWFollowup(PriorFollowup):
_pixel_scan_nsigma = 3.0
_allow_neg = True
_containment = None
_nside = 256
#_nside = 256
_season_names = ['IC86, 2017', 'IC86, 2018', 'IC86, 2019']
_nb_days = 5.
_ncpu = 10

def __init__(self, name, skymap_path, tstart, tstop, skipped=None, seed=None,
outdir=None, save=True, extension=None):
if (Time(tstop, format='iso').mjd - Time(tstart, format='iso').mjd) < (1001./86400.):
#Raamis uses 512 for bg trials
self._nside = 512
else:
#use 256 for 2 week
self._nside = 256
super().__init__(name, skymap_path, tstart, tstop, skipped=skipped, seed=seed,
outdir=outdir, save=save, extension=extension)

def run_background_trials(self, month=None, ntrials=1000):
'''For GW followup, 2 possible time windows:
Expand Down Expand Up @@ -61,6 +73,12 @@ def run_background_trials(self, month=None, ntrials=1000):
+ 'gw_precomputed_trials_delta_t_'
+ '{:.2e}_trials_rate_{:.1f}_low_stats.npz'.format(
self.duration * 86400., closest_rate, self.duration * 86400.))

#check for nside mismatch
if hp.pixelfunc.get_nside(pre_ts_array.shape[1]) != self.nside:
print('Error! Analysis uses nside of %i '.format(self.nside)+
'while precomputed BG is nside %i'.format(hp.pixelfunc.get_nside(pre_ts_array.shape[1])))

ts_norm = np.log(np.amax(self.skymap))
ts_prior = pre_ts_array.copy()
ts_prior.data += 2.*(np.log(self.skymap[pre_ts_array.indices]) - ts_norm)
Expand All @@ -86,6 +104,9 @@ def run_background_trials(self, month=None, ntrials=1000):
allow_pickle=True,
encoding='latin1'
)
if 512 != self.nside:
print('Error! Analysis uses nside of %i '.format(self.nside)+
'while precomputed BG is nside 512')

# Create spatial prior weighting
max_ts = []
Expand Down Expand Up @@ -289,8 +310,8 @@ def get_best_fit_contour(self, proportions=[0.5,0.9]):
plt.savefig(self.analysispath + '/' + self.analysisid + 'ts_contours.pdf',bbox_inches='tight', dpi=300)
plt.close()

def plot_ontime(self, with_contour=True, contour_files=None):
return super().plot_ontime(with_contour=True, contour_files=contour_files)
def plot_ontime(self, with_contour=True, contour_files=None, label_events=True):
return super().plot_ontime(with_contour=True, contour_files=contour_files, label_events=label_events)

def write_circular(self):
base = os.path.dirname(fast_response.__file__)
Expand Down
4 changes: 3 additions & 1 deletion fast_response/circular_templates/gw_gcn_template_high.txt
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ were conducted. The first search is a maximum likelihood analysis which searches
generic point-like neutrino source coincident with the given GW skymap. The
second uses a Bayesian approach to quantify the joint GW + neutrino event significance, which
assumes a binary merger scenario and accounts for known astrophysical priors, such as GW source
distance, in the significance estimate.
distance, in the significance estimate [3].

<N> track-like event(s) are found in spatial and temporal coincidence with the gravitational-wave
candidate <name> calculated from the map circulated <give notice ID here..>. This
Expand All @@ -25,6 +25,7 @@ The reported p-values can differ due to the estimated distance of the GW candida
The distance is used as a prior in the Bayesian binary merger search, while it is not
taken into account in the generic transient point-like source search. The false alarm rate of these
coincidences can be obtained by multiplying the p-values with their corresponding GW trigger rates.
Further details are available at https://gcn.nasa.gov/missions/icecube.

Properties of the coincident events are shown below.

Expand All @@ -46,5 +47,6 @@ reached at roc@icecube.wisc.edu

[1] M. G. Aartsen et al 2020 ApJL 898 L10
[2] Abbasi et al. Astrophys.J. 944 (2023) 1, 80
[3] I. Bartos et al. 2019 Phys. Rev. D 100, 083017

********************************
41 changes: 24 additions & 17 deletions fast_response/listeners/gcn_listener.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,22 +38,24 @@ def process_gcn(payload, root):
if stream == '26':
print("Detected cascade type alert, running cascade followup. . . ")
alert_type='cascade'
event_name='IceCube-Cascade_{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10])

skymap = params['skymap_fits']
else:
print("Found track type alert, running track followup. . . ")
alert_type='track'
event_name='IceCube-{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10])

# IceCube sends 2: a notice and a revision, only want to run once
if int(params['Rev']) !=0:
return
return

event_id = params['event_id']
run_id = params['run_id']
eventtime = root.find('.//ISOTime').text
event_mjd = Time(eventtime, format='isot').mjd
try:
bot.send_message(f'Listener found {alert_type} type alert, '+
f'IceCube-{eventtime[2:4]}{eventtime[5:7]}{eventtime[8:10]} \n'+
bot.send_message(f'Listener found {alert_type} type alert, {event_name} \n'+
'Waiting 1 day to run FRA', 'blanket_blob')
print(' - slack message sent \n')
except Exception as e:
Expand Down Expand Up @@ -90,10 +92,6 @@ def process_gcn(payload, root):
return

#checking for events on the same day: looks for existing output files from previous runs
if alert_type=='track':
event_name='IceCube-{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10])
else:
event_name='IceCube-Cascade_{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10])
count_dir=0
for directory in os.listdir(os.environ.get('FAST_RESPONSE_OUTPUT')):
if event_name in directory: count_dir+=1
Expand All @@ -112,29 +110,38 @@ def process_gcn(payload, root):
'--alert_id={}'.format(run_id+':'+event_id),
'--suffix={}'.format(suffix)]
)


if args.document:
try:
dir_1000 = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),
'*{}_1.0e+03_s').format(event_name))
subprocess.call([analysis_path+'document.py', '--path', dir_1000[0]])
dir_2d = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),
'*{}_1.7e+05_s').format(event_name))
subprocess.call([analysis_path+'document.py', '--path', dir_2d[0]])
except:
print('Failed to document to private webpage')

try:
shifters = pd.read_csv(os.path.join(analysis_path,'../slack_posters/fra_shifters.csv'),
parse_dates=[0,1])
on_shift=''
for i in shifters.index:
if shifters['start'][i]<datetime.utcnow()<shifters['stop'][i]:
on_shift+='<@{}> '.format(shifters['slack_id'][i])
bot.send_message(f'Done running FRA for {alert_type} alert, {event_name}.\n '+ on_shift +'on shift',
link = 'https://user-web.icecube.wisc.edu/~jthwaites/FastResponse/webpage/output/'
wp_link_1000 = '{}{}_1.0e+03_s.html'.format(link, eventtime[0:10].replace('-','_')+'_'+event_name)
wp_link_2d = '{}{}_1.7e+05_s.html'.format(link, eventtime[0:10].replace('-','_')+'_'+event_name)
bot.send_message(f'Done running FRA for {alert_type} alert, {event_name}.\n ' +
"Results for 1000s: <{}|link>. \n ".format(wp_link_1000) +
"Results for 2d: <{}|link>. \n".format(wp_link_2d) +
+ on_shift +'on shift',
'blanket_blob')
print(' - slack message sent \n')
except Exception as e:
print(e)
print('No slack message sent.')

#if args.document:
# dir_1000 = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),
# '*{}_1.0e+03_s').format(event_name))
# subprocess.call([analysis_path+'document.py', '--path', dir_1000])
# dir_2d = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),
# '*{}_1.7e+05_s').format(event_name))
# subprocess.call([analysis_path+'document.py', '--path', dir_2d])

if __name__ == '__main__':
import os, subprocess
import healpy as hp
Expand Down
40 changes: 29 additions & 11 deletions fast_response/listeners/gw_gcn_listener.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,30 +43,27 @@ def process_gcn(payload, root):
for elem in root.iterfind('.//Param')}
name = root.attrib['ivorn'].split('#')[1]

# if this is the listener for real events and it gets a mock, skip running on it
if not mock and root.attrib['role']!='observation':
return
# only run on significant events
if 'Significant' in params.keys():
if int(params['Significant'])==0:
#not significant, do not run
print(f'Found a subthreshold event {name}')
root.attrib['role']='test'
log_file.flush()
return
#return
else:
# O3 does not have this parameter, this should only happen for testing
print('No significance parameter found in LVK GCN.')
log_file.flush()
# if this is the listener for real events and it gets a mock (or low signficance), skip it
if not mock and root.attrib['role']!='observation':
return

print('\n' +'INCOMING ALERT FOUND: ',datetime.utcnow())

log_file.flush()

if root.attrib['role']=='observation' and not mock:
## Call everyone because it's a real event!
username = pwd.getpwuid(os.getuid())[0]
#if username == 'realtime':
# call_command = [os.path.join(analysis_path, 'make_call.py')]
#else:
# call_command=['/cvmfs/icecube.opensciencegrid.org/users/jthwaites/make_call.py']
call_command=['/cvmfs/icecube.opensciencegrid.org/users/jthwaites/make_call.py']

call_args = ['--justin']
Expand All @@ -79,6 +76,12 @@ def process_gcn(payload, root):
print('Call failed.')
print(e)
log_file.flush()

# want heartbeat listener not to run on real events, otherwise it overwrites the main listener output
if mock and root.attrib['role']=='observation':
print('Listener in heartbeat mode found real event. Returning...')
log_file.flush()
return

# Read trigger time of event
eventtime = root.find('.//ISOTime').text
Expand Down Expand Up @@ -125,6 +128,7 @@ def process_gcn(payload, root):
except:
print('Failed to download skymap in correct format! \nDownload skymap and then re-run script with')
print(f'args: --time {event_mjd} --name {name} --skymap PATH_TO_SKYMAP')
log_file.flush()
return
#TODO: add make_call to me here if this fails

Expand All @@ -149,6 +153,18 @@ def process_gcn(payload, root):
if not mock and root.attrib['role'] == 'observation':
try:
subprocess.call([webpage_update, '--gw', f'--path={output}'])

wp_link = 'https://user-web.icecube.wisc.edu/~jthwaites/FastResponse/gw-webpage/output/{}.html'.format(eventtime[0:10].replace('-','_')+'_'+name)
slack_message = "UML GW analysis finished running for event {}: <{}|link>.".format(name, wp_link)
with open('../slack_posters/internal_alert_slackbot.txt') as f:
chan = f.readline().rstrip('\n')
webhook = f.readline().rstrip('\n')
bot_name = f.readline().rstrip('\n')

for channel in ['#fra-shifting','#gwnu']:
bot = slackbot(channel, bot_name, webhook)
bot.send_message(slack_message,'gw')

except Exception as e:
print('Failed to push to (private) webpage.')
print(e)
Expand Down Expand Up @@ -202,6 +218,7 @@ def process_gcn(payload, root):
import time
from astropy.time import Time
from datetime import datetime
from fast_response.slack_posters.slack import slackbot
import wget

output_path = '/home/jthwaites/public_html/FastResponse/'
Expand Down Expand Up @@ -256,7 +273,8 @@ def process_gcn(payload, root):
print(e)
sample_skymap_path='/data/user/jthwaites/o3-gw-skymaps/'

payload = open(os.path.join(sample_skymap_path,args.test_path), 'rb').read()
#payload = open(os.path.join(sample_skymap_path,args.test_path), 'rb').read()
payload = open(args.test_path,'rb').read()
root = lxml.etree.fromstring(payload)

mock=args.heartbeat
Expand Down
19 changes: 14 additions & 5 deletions fast_response/reports/ReportGenerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,16 +200,23 @@ def make_coinc_events_table(self, f):
else:
if 'pvalue' in self.analysis.coincident_events[0].keys():
with_p = True
event_table+=[
('$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec',
'\sigma (90\%)','E_{reco} (GeV)',
'p-value','In 90\% Contour')]
if len(self.analysis.coincident_events)<100:
event_table+=[
('Event','$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec',
'\sigma (90\%)','E_{reco} (GeV)',
'p-value','In 90\% Contour')]
else:
event_table+=[
('$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec',
'\sigma (90\%)','E_{reco} (GeV)',
'p-value','In 90\% Contour')]
else:
with_p = False
event_table+=[
('$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec',
'\sigma (90\%)','E_{reco} (GeV)',
'In 90\% Contour')]
i = 0
for event in self.analysis.coincident_events:
if with_p:
if len(self.analysis.coincident_events)>100:
Expand All @@ -225,14 +232,16 @@ def make_coinc_events_table(self, f):
)]
else:
event_table+=[
('{:.0f}'.format((event['time']-self.source['trigger_mjd'])*86400.),
('{}'.format(i),
'{:.0f}'.format((event['time']-self.source['trigger_mjd'])*86400.),
"{:3.2f}\degree".format(np.rad2deg(event['ra'])),
'{:3.2f}\degree'.format(np.rad2deg(event['dec'])),
"{:3.2f}\degree".format(np.rad2deg(event["sigma"]*self.analysis._angScale)),
"{:.2E}".format(10**event['logE']),
'{:.4f}'.format(event['pvalue']),
str(bool(event['in_contour']))
)]
i+=1
else:
if len(self.analysis.coincident_events)>100:
if event['in_contour']:
Expand Down
Loading

0 comments on commit 08d22c6

Please sign in to comment.