From 08d22c63fa9b19f0d056e4cb8ea0e70ccb9a876b Mon Sep 17 00:00:00 2001
From: Jessie Thwaites
Date: Mon, 3 Jul 2023 13:50:58 -0500
Subject: [PATCH] O4 realtime (#49)

* add slackbot to post link when done
* add heartbeat option and post gcn to slack
* add link to gcn docs
* labeling events for easy pval comp
* add bartos et al
* round duration
* correct event name for cascades
* run scrambled on subthresh, post reports
* multiple channels for gcn if high significance
* updating public webpages
* document if on realtime
* don't label events if not 1000s tw
* look in albert home while lvk_dropbox is down
* new script to convert multiorder maps
* pointing back to dropbox
* nside checks, 512 for 1000s followup
* nside arg for moc conversion script
* public webpage labels for sig and subthr
* return if real event in heartbeat mode
---
 fast_response/FastResponseAnalysis.py       |  18 ++-
 fast_response/GWFollowup.py                 |  27 +++-
 .../gw_gcn_template_high.txt                |   4 +-
 fast_response/listeners/gcn_listener.py     |  41 +++---
 fast_response/listeners/gw_gcn_listener.py  |  40 ++++--
 fast_response/reports/ReportGenerator.py    |  19 ++-
 .../scripts/combine_results_kafka.py        |  88 +++++++++---
 .../scripts/convert_moc_to_healpix.py       |  29 ++++
 fast_response/scripts/run_gw_followup.py    |   7 +-
 fast_response/web_utils.py                  | 127 ++++++++++++------
 html/gw_pub_templ_base.html                 |  14 +-
 11 files changed, 304 insertions(+), 110 deletions(-)
 create mode 100644 fast_response/scripts/convert_moc_to_healpix.py

diff --git a/fast_response/FastResponseAnalysis.py b/fast_response/FastResponseAnalysis.py
index 3e4d72c..134fd09 100644
--- a/fast_response/FastResponseAnalysis.py
+++ b/fast_response/FastResponseAnalysis.py
@@ -439,16 +439,17 @@ def save_results(self, alt_path=None):
         print("Results successfully saved")
         return self.save_items
 
-    def plot_ontime(self, with_contour=False, contour_files=None):
+    def plot_ontime(self, with_contour=False, contour_files=None, label_events=False):
         r'''Plots ontime events with either the full sky spatial prior
         or a zoomed in version like traditional fast response followups
+        label_events: adds a number label to events on skymap
         '''
         try:
             self.plot_skymap_zoom(with_contour=with_contour, contour_files=contour_files)
         except:
             print('Failed to make skymap zoom plot')
 
-        self.plot_skymap(with_contour=with_contour, contour_files=contour_files)
+        self.plot_skymap(with_contour=with_contour, contour_files=contour_files, label_events=label_events)
 
     def plot_skymap_zoom(self, with_contour=False, contour_files=None):
         r'''Make a zoomed in portion of a skymap with
@@ -516,7 +517,7 @@ def plot_skymap_zoom(self, with_contour=False, contour_files=None):
                 contour_counter += 1
 
         if with_contour:
-            probs = hp.pixelfunc.ud_grade(self.skymap, 64)
+            probs = hp.pixelfunc.ud_grade(self.skymap, 64, power=-2)
             probs = probs/np.sum(probs)
             ### plot 90% containment contour of PDF
             levels = [0.9]
@@ -534,7 +535,7 @@ def plot_skymap_zoom(self, with_contour=False, contour_files=None):
         plt.savefig(self.analysispath + '/' + self.analysisid + 'unblinded_skymap_zoom.pdf',bbox_inches='tight', dpi=300)
         plt.close()
 
-    def plot_skymap(self, with_contour=False, contour_files=None):
+    def plot_skymap(self, with_contour=False, contour_files=None, label_events=False):
         r''' Make skymap with event localization and all
         neutrino events on the sky within the given time window
         '''
@@ -599,6 +600,9 @@ def plot_skymap(self, with_contour=False, contour_files=None):
         # plot events on sky with error contours
         handles=[]
         hp.projscatter(theta,phi,c=cols,marker='x',label='GFU Event',coord='C', zorder=5)
+        if label_events:
+            for j in range(len(theta)):
+                hp.projtext(theta[j], phi[j]-0.11, '{}'.format(j), color='red', fontsize=18, zorder=6)
         handles.append(Line2D([0], [0], marker='x', ls='None', label='GFU Event'))
 
         if (self.stop - self.start) <= 0.5: #Only plot contours if less than 2 days
@@ -627,7 +631,7 @@ def plot_skymap(self, with_contour=False, contour_files=None):
                 contour_counter += 1
 
         if with_contour and self.skymap is not None:
-            probs = hp.pixelfunc.ud_grade(self.skymap, 64)
+            probs = hp.pixelfunc.ud_grade(self.skymap, 64, power=-2)
             probs = probs/np.sum(probs)
             ### plot 90% containment contour of PDF
             levels = [0.9]
@@ -684,7 +688,7 @@ def __init__(self, name, skymap_path, tstart, tstop, skipped=None, seed=None,
 
         super().__init__(name, tstart, tstop, skipped=skipped, seed=seed,
                          outdir=outdir, save=save, extension=extension)
-
+        self.skymap_path = skymap_path
         try:
             skymap, skymap_header = hp.read_map(skymap_path, h=True, verbose=False)
@@ -714,7 +718,7 @@ def __str__(self):
 
     def format_skymap(self, skymap):
         if hp.pixelfunc.get_nside(skymap) != self._nside:
-            skymap = hp.pixelfunc.ud_grade(skymap, self._nside)
+            skymap = hp.pixelfunc.ud_grade(skymap, self._nside, power=-2)
             skymap = skymap/skymap.sum()
         return skymap
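A note on the recurring power=-2 in the ud_grade calls above: by default healpy averages child pixels when degrading a map, which does not conserve the summed probability of a normalized skymap; power=-2 rescales the pixel values so the total is preserved (per the healpy ud_grade documentation). A quick demonstration with a toy map (illustrative only, not part of this patch):

    import healpy as hp
    import numpy as np

    # toy normalized probability map at nside=256
    nside_in = 256
    probs = np.random.rand(hp.nside2npix(nside_in))
    probs /= probs.sum()

    # default ud_grade averages children into parents: the sum is NOT preserved
    down_default = hp.pixelfunc.ud_grade(probs, 64)
    # power=-2 rescales so the total (integrated probability) is preserved
    down_power = hp.pixelfunc.ud_grade(probs, 64, power=-2)

    print(np.sum(down_default))  # ~1/16, since npix drops by a factor of 16
    print(np.sum(down_power))    # ~1.0

The explicit probs/np.sum(probs) renormalization that follows in the plotting code then only has to absorb numerical round-off.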
diff --git a/fast_response/GWFollowup.py b/fast_response/GWFollowup.py
index fe9bf32..4af2024 100644
--- a/fast_response/GWFollowup.py
+++ b/fast_response/GWFollowup.py
@@ -9,6 +9,7 @@
 mpl.use('agg')
 import matplotlib.pyplot as plt
 import pickle
+from astropy.time import Time
 
 from .FastResponseAnalysis import PriorFollowup
 from .reports import GravitationalWaveReport
@@ -28,10 +29,21 @@ class GWFollowup(PriorFollowup):
     _pixel_scan_nsigma = 3.0
     _allow_neg = True
     _containment = None
-    _nside = 256
+    #_nside = 256
     _season_names = ['IC86, 2017', 'IC86, 2018', 'IC86, 2019']
     _nb_days = 5.
     _ncpu = 10
+
+    def __init__(self, name, skymap_path, tstart, tstop, skipped=None, seed=None,
+                 outdir=None, save=True, extension=None):
+        if (Time(tstop, format='iso').mjd - Time(tstart, format='iso').mjd) < (1001./86400.):
+            #Raamis uses 512 for bg trials
+            self._nside = 512
+        else:
+            #use 256 for 2 week
+            self._nside = 256
+        super().__init__(name, skymap_path, tstart, tstop, skipped=skipped, seed=seed,
+                         outdir=outdir, save=save, extension=extension)
 
     def run_background_trials(self, month=None, ntrials=1000):
         '''For GW followup, 2 possible time windows:
@@ -61,6 +73,12 @@ def run_background_trials(self, month=None, ntrials=1000):
                 + 'gw_precomputed_trials_delta_t_'
                 + '{:.2e}_trials_rate_{:.1f}_low_stats.npz'.format(
                 self.duration * 86400., closest_rate, self.duration * 86400.))
+
+            #check for nside mismatch
+            if hp.pixelfunc.npix2nside(pre_ts_array.shape[1]) != self.nside:
+                print('Error! Analysis uses nside of {} '.format(self.nside)+
+                      'while precomputed BG is nside {}'.format(hp.pixelfunc.npix2nside(pre_ts_array.shape[1])))
 
             ts_norm = np.log(np.amax(self.skymap))
             ts_prior = pre_ts_array.copy()
             ts_prior.data += 2.*(np.log(self.skymap[pre_ts_array.indices]) - ts_norm)
@@ -86,6 +104,9 @@ def run_background_trials(self, month=None, ntrials=1000):
             allow_pickle=True,
             encoding='latin1'
         )
+        if 512 != self.nside:
+            print('Error! Analysis uses nside of {} '.format(self.nside)+
+                  'while precomputed BG is nside 512')
 
         # Create spatial prior weighting
         max_ts = []
@@ -289,8 +310,8 @@ def get_best_fit_contour(self, proportions=[0.5,0.9]):
         plt.savefig(self.analysispath + '/' + self.analysisid + 'ts_contours.pdf',bbox_inches='tight', dpi=300)
         plt.close()
 
-    def plot_ontime(self, with_contour=True, contour_files=None):
-        return super().plot_ontime(with_contour=True, contour_files=contour_files)
+    def plot_ontime(self, with_contour=True, contour_files=None, label_events=True):
+        return super().plot_ontime(with_contour=True, contour_files=contour_files, label_events=label_events)
 
     def write_circular(self):
         base = os.path.dirname(fast_response.__file__)
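On the nside consistency check above: the precomputed background trials are stored as a 2D array with one row per scrambled trial and one column per HEALPix pixel, so the nside has to be recovered from the pixel count. healpy's get_nside expects a full map array and errors out on a bare integer; npix2nside is the inverse of nside2npix and works on the count directly. A short check, with a hypothetical stand-in for the trials array:

    import healpy as hp
    import numpy as np

    # hypothetical stand-in: 100 trials over an nside=512 sky
    pre_ts_array = np.zeros((100, hp.nside2npix(512)))

    # recover the resolution from the number of pixels (columns)
    nside_bg = hp.pixelfunc.npix2nside(pre_ts_array.shape[1])
    print(nside_bg)  # 512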
") alert_type='track' + event_name='IceCube-{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10]) # IceCube sends 2: a notice and a revision, only want to run once if int(params['Rev']) !=0: - return + return event_id = params['event_id'] run_id = params['run_id'] eventtime = root.find('.//ISOTime').text event_mjd = Time(eventtime, format='isot').mjd try: - bot.send_message(f'Listener found {alert_type} type alert, '+ - f'IceCube-{eventtime[2:4]}{eventtime[5:7]}{eventtime[8:10]} \n'+ + bot.send_message(f'Listener found {alert_type} type alert, {event_name} \n'+ 'Waiting 1 day to run FRA', 'blanket_blob') print(' - slack message sent \n') except Exception as e: @@ -90,10 +92,6 @@ def process_gcn(payload, root): return #checking for events on the same day: looks for existing output files from previous runs - if alert_type=='track': - event_name='IceCube-{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10]) - else: - event_name='IceCube-Cascade_{}{}{}'.format(eventtime[2:4],eventtime[5:7],eventtime[8:10]) count_dir=0 for directory in os.listdir(os.environ.get('FAST_RESPONSE_OUTPUT')): if event_name in directory: count_dir+=1 @@ -112,7 +110,18 @@ def process_gcn(payload, root): '--alert_id={}'.format(run_id+':'+event_id), '--suffix={}'.format(suffix)] ) - + + if args.document: + try: + dir_1000 = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'), + '*{}_1.0e+03_s').format(event_name)) + subprocess.call([analysis_path+'document.py', '--path', dir_1000[0]]) + dir_2d = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'), + '*{}_1.7e+05_s').format(event_name)) + subprocess.call([analysis_path+'document.py', '--path', dir_2d[0]]) + except: + print('Failed to document to private webpage') + try: shifters = pd.read_csv(os.path.join(analysis_path,'../slack_posters/fra_shifters.csv'), parse_dates=[0,1]) @@ -120,21 +129,19 @@ def process_gcn(payload, root): for i in shifters.index: if shifters['start'][i]. \n ".format(wp_link_1000) + + "Results for 2d: <{}|link>. 
\n".format(wp_link_2d) + + + on_shift +'on shift', 'blanket_blob') print(' - slack message sent \n') except Exception as e: print(e) print('No slack message sent.') - #if args.document: - # dir_1000 = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'), - # '*{}_1.0e+03_s').format(event_name)) - # subprocess.call([analysis_path+'document.py', '--path', dir_1000]) - # dir_2d = glob.glob(os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'), - # '*{}_1.7e+05_s').format(event_name)) - # subprocess.call([analysis_path+'document.py', '--path', dir_2d]) - if __name__ == '__main__': import os, subprocess import healpy as hp diff --git a/fast_response/listeners/gw_gcn_listener.py b/fast_response/listeners/gw_gcn_listener.py index a37a651..b08be02 100755 --- a/fast_response/listeners/gw_gcn_listener.py +++ b/fast_response/listeners/gw_gcn_listener.py @@ -43,30 +43,27 @@ def process_gcn(payload, root): for elem in root.iterfind('.//Param')} name = root.attrib['ivorn'].split('#')[1] - # if this is the listener for real events and it gets a mock, skip running on it - if not mock and root.attrib['role']!='observation': - return # only run on significant events if 'Significant' in params.keys(): if int(params['Significant'])==0: #not significant, do not run print(f'Found a subthreshold event {name}') + root.attrib['role']='test' log_file.flush() - return + #return else: # O3 does not have this parameter, this should only happen for testing print('No significance parameter found in LVK GCN.') log_file.flush() + # if this is the listener for real events and it gets a mock (or low signficance), skip it + if not mock and root.attrib['role']!='observation': + return print('\n' +'INCOMING ALERT FOUND: ',datetime.utcnow()) - + log_file.flush() + if root.attrib['role']=='observation' and not mock: ## Call everyone because it's a real event! - username = pwd.getpwuid(os.getuid())[0] - #if username == 'realtime': - # call_command = [os.path.join(analysis_path, 'make_call.py')] - #else: - # call_command=['/cvmfs/icecube.opensciencegrid.org/users/jthwaites/make_call.py'] call_command=['/cvmfs/icecube.opensciencegrid.org/users/jthwaites/make_call.py'] call_args = ['--justin'] @@ -79,6 +76,12 @@ def process_gcn(payload, root): print('Call failed.') print(e) log_file.flush() + + # want heartbeat listener not to run on real events, otherwise it overwrites the main listener output + if mock and root.attrib['role']=='observation': + print('Listener in heartbeat mode found real event. Returning...') + log_file.flush() + return # Read trigger time of event eventtime = root.find('.//ISOTime').text @@ -125,6 +128,7 @@ def process_gcn(payload, root): except: print('Failed to download skymap in correct format! 
\nDownload skymap and then re-run script with') print(f'args: --time {event_mjd} --name {name} --skymap PATH_TO_SKYMAP') + log_file.flush() return #TODO: add make_call to me here if this fails @@ -149,6 +153,18 @@ def process_gcn(payload, root): if not mock and root.attrib['role'] == 'observation': try: subprocess.call([webpage_update, '--gw', f'--path={output}']) + + wp_link = 'https://user-web.icecube.wisc.edu/~jthwaites/FastResponse/gw-webpage/output/{}.html'.format(eventtime[0:10].replace('-','_')+'_'+name) + slack_message = "UML GW analysis finished running for event {}: <{}|link>.".format(name, wp_link) + with open('../slack_posters/internal_alert_slackbot.txt') as f: + chan = f.readline().rstrip('\n') + webhook = f.readline().rstrip('\n') + bot_name = f.readline().rstrip('\n') + + for channel in ['#fra-shifting','#gwnu']: + bot = slackbot(channel, bot_name, webhook) + bot.send_message(slack_message,'gw') + except Exception as e: print('Failed to push to (private) webpage.') print(e) @@ -202,6 +218,7 @@ def process_gcn(payload, root): import time from astropy.time import Time from datetime import datetime + from fast_response.slack_posters.slack import slackbot import wget output_path = '/home/jthwaites/public_html/FastResponse/' @@ -256,7 +273,8 @@ def process_gcn(payload, root): print(e) sample_skymap_path='/data/user/jthwaites/o3-gw-skymaps/' - payload = open(os.path.join(sample_skymap_path,args.test_path), 'rb').read() + #payload = open(os.path.join(sample_skymap_path,args.test_path), 'rb').read() + payload = open(args.test_path,'rb').read() root = lxml.etree.fromstring(payload) mock=args.heartbeat diff --git a/fast_response/reports/ReportGenerator.py b/fast_response/reports/ReportGenerator.py index 9424a87..bf74de2 100644 --- a/fast_response/reports/ReportGenerator.py +++ b/fast_response/reports/ReportGenerator.py @@ -200,16 +200,23 @@ def make_coinc_events_table(self, f): else: if 'pvalue' in self.analysis.coincident_events[0].keys(): with_p = True - event_table+=[ - ('$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec', - '\sigma (90\%)','E_{reco} (GeV)', - 'p-value','In 90\% Contour')] + if len(self.analysis.coincident_events)<100: + event_table+=[ + ('Event','$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec', + '\sigma (90\%)','E_{reco} (GeV)', + 'p-value','In 90\% Contour')] + else: + event_table+=[ + ('$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec', + '\sigma (90\%)','E_{reco} (GeV)', + 'p-value','In 90\% Contour')] else: with_p = False event_table+=[ ('$t_{\\nu}$-$t_{trigger}$ (s)','RA','Dec', '\sigma (90\%)','E_{reco} (GeV)', 'In 90\% Contour')] + i = 0 for event in self.analysis.coincident_events: if with_p: if len(self.analysis.coincident_events)>100: @@ -225,7 +232,8 @@ def make_coinc_events_table(self, f): )] else: event_table+=[ - ('{:.0f}'.format((event['time']-self.source['trigger_mjd'])*86400.), + ('{}'.format(i), + '{:.0f}'.format((event['time']-self.source['trigger_mjd'])*86400.), "{:3.2f}\degree".format(np.rad2deg(event['ra'])), '{:3.2f}\degree'.format(np.rad2deg(event['dec'])), "{:3.2f}\degree".format(np.rad2deg(event["sigma"]*self.analysis._angScale)), @@ -233,6 +241,7 @@ def make_coinc_events_table(self, f): '{:.4f}'.format(event['pvalue']), str(bool(event['in_contour'])) )] + i+=1 else: if len(self.analysis.coincident_events)>100: if event['in_contour']: diff --git a/fast_response/scripts/combine_results_kafka.py b/fast_response/scripts/combine_results_kafka.py index 04c470a..cc873d1 100644 --- a/fast_response/scripts/combine_results_kafka.py +++ 
diff --git a/fast_response/scripts/combine_results_kafka.py b/fast_response/scripts/combine_results_kafka.py
index 04c470a..cc873d1 100644
--- a/fast_response/scripts/combine_results_kafka.py
+++ b/fast_response/scripts/combine_results_kafka.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python
 
-#rm ln 103/4!!!
 import logging
 from datetime import datetime
 import socket
@@ -9,17 +8,18 @@
 import matplotlib as mpl
 mpl.use('agg')
 import matplotlib.pyplot as plt
-import io, time, os
+import io, time, os, glob
 import urllib.request, urllib.error, urllib.parse
 import argparse
 import json, pickle
 from gcn_kafka import Consumer
-from icecube import realtime_tools
+#from icecube import realtime_tools
 import numpy as np
 import lxml.etree
 from astropy.time import Time
 import dateutil.parser
 from datetime import datetime
+from fast_response.web_utils import updateGW_public
 
 with open('/cvmfs/icecube.opensciencegrid.org/users/jthwaites/tokens/kafka_token.txt') as f:
     client_id = f.readline().rstrip('\n')
@@ -167,10 +167,12 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
     logger = logging.getLogger()
 
     if record.attrib['role']!='observation':
-        logger.warning('found test event')
         fra_results_location = '/data/user/jthwaites/o4-mocks/'
         if not heartbeat:
+            logger.warning('found test event - not in mock mode. returning')
             return
+        else:
+            logger.warning('found test event')
     else:
         logger.warning('ALERT FOUND')
         fra_results_location = os.environ.get('FAST_RESPONSE_OUTPUT')
@@ -218,6 +220,7 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
     # wait until the 500s has elapsed for data
     current_mjd = Time(datetime.utcnow(), scale='utc').mjd
     needed_delay = 500./84600.
+    #needed_delay = 720./86400. #12 mins while lvk_dropbox is offline
     current_delay = current_mjd - event_mjd
 
     while current_delay < needed_delay:
@@ -250,10 +253,22 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
             llama_results_finished = os.path.exists(llama_results_path)
     else:
         llama_results_finished = False
+    #llama_name = '{}*-significance_subthreshold_lvc-i3.json'.format(record.attrib['ivorn'].split('#')[1])
+    #llama_results_path = os.path.join(llama_results_location,llama_name)
+    #if wait_for_llama:
+    #    llama_results_glob = sorted(glob.glob(llama_results_path))
+    #    if len(llama_results_glob)>0:
+    #        llama_results_path = llama_results_glob[-1]
+    #        llama_results_finished=True
+    #    else:
+    #        llama_results_finished=False
+    #else:
+    #    llama_results_finished = False
 
     if subthreshold and llama_results_finished:
         #no UML results in subthreshold case, only LLAMA
-        results_done=True
+        results_done = True
+        uml_results_finished = False
         logger.info('found results for LLAMA for subthreshold event, writing notice')
 
     if uml_results_finished and llama_results_finished:
@@ -286,14 +301,14 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
         collected_results['observation_livetime'] = 1000
 
     ### COLLECT RESULTS ###
-    additional_website_params = {}
+    #additional_website_params = {}
     if uml_results_finished:
         with open(uml_results_path, 'rb') as f:
             uml_results = pickle.load(f)
 
-        for key in ['skymap_path', 'analysisid']:
-            if key in uml_results.keys():
-                additional_website_params[key] = uml_results[key]
+        #for key in ['skymap_path', 'analysisid']:
+        #    if key in uml_results.keys():
+        #        additional_website_params[key] = uml_results[key]
 
     if llama_results_finished:
         with open(llama_results_path, 'r') as f:
@@ -412,6 +427,36 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
         st = 'sent' if status == 0 else 'error!'
         logger.info('status: {}'.format(status))
         logger.info('{}'.format(st))
+
+        if status ==0:
+            with open('/cvmfs/icecube.opensciencegrid.org/users/jthwaites/tokens/gw_token.txt') as f:
+                my_key = f.readline()
+
+            if not subthreshold:
+                channels = ['#gwnu-heartbeat', '#gwnu','#alerts']
+            else:
+                channels = ['#gwnu-heartbeat']
+            for channel in channels:
+                with open(os.path.join(save_location, f'{name}_collected_results.json'),'r') as fi:
+                    response = requests.post('https://slack.com/api/files.upload',
+                                             timeout=60,
+                                             params={'token': my_key},
+                                             data={'filename':'gcn.json',
+                                                   'title': f'GCN Notice for {name}',
+                                                   'channels': channel},
+                                             files={'file': fi}
+                                             )
+                if response.ok is True:
+                    logger.info("GCN posted OK to {}".format(channel))
+                else:
+                    logger.info("Error posting skymap to {}!".format(channel))
+
+        collected_results['subthreshold'] = subthreshold
+        try:
+            updateGW_public(collected_results)
+            logger.info('Updated webpage.')
+        except:
+            logger.warning('Failed to push to public webpage.')
 
     else:
         with open(os.path.join(save_location, f'mocks/{name}_collected_results.json'),'w') as f:
@@ -419,11 +464,24 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
         #status = SendTestAlert(results = collected_results)
         #logger.info('status: {}'.format(status))
 
-        # Adding a few extra keys needed to create public webpage
-        for key in additional_website_params.keys():
-            collected_results[key] = additional_website_params[key]
-
-        #web_utils.updateGW_public(collected_results)
+        #send the notice to slack (#gw-mock-heartbeat)
+        with open('/cvmfs/icecube.opensciencegrid.org/users/jthwaites/tokens/gw_token.txt') as f:
+            my_key = f.readline()
+
+        channel = '#gw-mock-heartbeat'
+        with open(os.path.join(save_location, f'mocks/{name}_collected_results.json'),'r') as fi:
+            response = requests.post('https://slack.com/api/files.upload',
+                                     timeout=60,
+                                     params={'token': my_key},
+                                     data={'filename':'gcn.json',
+                                           'title': f'GCN Notice for {name}',
+                                           'channels': channel},
+                                     files={'file': fi}
+                                     )
+        if response.ok is True:
+            logger.info("GCN posted OK to {}".format(channel))
+        else:
+            logger.info("Error posting skymap to {}!".format(channel))
 
 parser = argparse.ArgumentParser(description='Combine GW-Nu results')
 parser.add_argument('--run_live', action='store_true', default=False,
@@ -446,6 +504,7 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
 
 #fra_results_location = os.environ.get('FAST_RESPONSE_OUTPUT')#'/data/user/jthwaites/o4-mocks/'
 llama_results_location = '/home/followup/lvk_dropbox/'
+#llama_results_location = '/home/azhang/public_html/llama/json/'
 #save_location = '/home/followup/lvk_followup_output/' #where to save final json
 save_location = args.save_dir
 
@@ -467,7 +526,6 @@ def parse_notice(record, wait_for_llama=False, heartbeat=False):
     else:
         if args.test_path is None:
-            import glob
             paths=glob.glob('/home/jthwaites/FastResponse/*/*xml')
             path = paths[1]
             #path = '/home/jthwaites/FastResponse/S230522n-preliminary.json,1'
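combine_results_kafka.py receives LVK notices over GCN's Kafka broker. The subscription boilerplate is not part of the hunks above; a minimal sketch of the consumer loop, following the nasa-gcn/gcn-kafka-python README (the topic names and credential values here are illustrative, not taken from this patch):

    from gcn_kafka import Consumer
    import lxml.etree

    # client_id/client_secret come from a GCN credential file,
    # as with the kafka_token.txt read at the top of the script
    consumer = Consumer(client_id='fill-me-in', client_secret='fill-me-in')

    # classic VOEvent topics for LVK alerts on GCN Kafka (illustrative subset)
    consumer.subscribe(['gcn.classic.voevent.LVC_PRELIMINARY',
                        'gcn.classic.voevent.LVC_INITIAL',
                        'gcn.classic.voevent.LVC_UPDATE'])

    while True:
        for message in consumer.consume(timeout=1):
            if message.error():
                continue
            # each payload is a VOEvent; role distinguishes mocks from real events
            record = lxml.etree.fromstring(message.value())
            print(record.attrib.get('role'), record.attrib.get('ivorn'))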
diff --git a/fast_response/scripts/convert_moc_to_healpix.py b/fast_response/scripts/convert_moc_to_healpix.py
new file mode 100644
index 0000000..9c29c18
--- /dev/null
+++ b/fast_response/scripts/convert_moc_to_healpix.py
@@ -0,0 +1,29 @@
+import numpy as np
+import matplotlib as mpl
+mpl.use('agg')
+import mhealpy as mhp
+import healpy as hp
+from astropy.table import QTable
+from astropy import units as u
+import argparse
+
+parser = argparse.ArgumentParser(description='GW Followup')
+parser.add_argument('--skymap', type=str, default=None,
+                    help='path to skymap (should be the *.multiorder.fits downloaded from GraceDB)')
+parser.add_argument('--nside', type=int, default=512,
+                    help='nside to be used with the skymap (default=512)')
+args = parser.parse_args()
+
+# Read skymap
+skymap = QTable.read(args.skymap)
+
+# convert to a probability in each pixel by multiplying by pix area
+s = [skymap[i]['PROBDENSITY'].to_value(u.deg**-2)*hp.nside2pixarea(mhp.uniq2nside(skymap[i]['UNIQ']),degrees=True)
+     for i in range(len(skymap))]
+
+# put into mhealpy & get flattened map
+m = mhp.HealpixMap(data=s, uniq=skymap['UNIQ'])
+#returns a new map
+new_map = m.rasterize(args.nside, 'NESTED')
+
+# save the new map
+new_map.write_map(args.skymap.replace('multiorder','converted'), overwrite=True)
\ No newline at end of file

diff --git a/fast_response/scripts/run_gw_followup.py b/fast_response/scripts/run_gw_followup.py
index 846e8a7..84d8566 100755
--- a/fast_response/scripts/run_gw_followup.py
+++ b/fast_response/scripts/run_gw_followup.py
@@ -20,7 +20,7 @@
                     help='Time of the GW (mjd)')
 parser.add_argument('--name', type=str,
                     default="name of GW event being followed up")
-parser.add_argument('--tw', default = 1000, type=int,
+parser.add_argument('--tw', default = 1000, type=int, #2 week: 1218240
                     help = 'Time window for the analysis (default = 1000)')
 parser.add_argument('--allow_neg_ts', type=bool, default=False,
                     help='bool to allow negative TS values in gw analysis.')
@@ -55,7 +55,10 @@
 f.save_items['skymap_link'] = args.skymap
 
 f.unblind_TS()
-f.plot_ontime()
+if args.tw>1000:
+    f.plot_ontime(label_events = False)
+else:
+    f.plot_ontime()
 f.calc_pvalue()
 f.make_dNdE()
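A usage sketch for the new convert_moc_to_healpix.py, with an illustrative file name (the script writes the input path with 'multiorder' replaced by 'converted'). Since the script converts probability density to probability per pixel before flattening, the rasterized map should still sum to ~1 at the target nside:

    # python convert_moc_to_healpix.py --skymap S230627c.multiorder.fits --nside 512
    import healpy as hp
    import numpy as np

    m = hp.read_map('S230627c.converted.fits')
    print(hp.get_nside(m))  # 512
    print(np.sum(m))        # ~1.0, up to the numerical precision of the rasterization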
diff --git a/fast_response/web_utils.py b/fast_response/web_utils.py
index 3557d75..e3369a7 100644
--- a/fast_response/web_utils.py
+++ b/fast_response/web_utils.py
@@ -12,6 +12,7 @@
 import matplotlib.dates as mdates
 import matplotlib.pyplot as plt
 import fast_response
+import dateutil.parser
 
 username = pwd.getpwuid(os.getuid())[0]
 if username == 'realtime': username='jthwaites'
@@ -132,7 +133,7 @@ def createFastResponsePage(analysis, gw=False):
             new_f[i] = new_f[i].replace('ANALYSISSTART', Time(analysis['start'], format='mjd').iso)
         if 'ANALYSISDURATION' in new_f[i]:
             dur = (analysis['stop'] - analysis['start']) * 86400.
-            new_f[i] = new_f[i].replace('ANALYSISDURATION', str(dur))
+            new_f[i] = new_f[i].replace('ANALYSISDURATION', str(round(dur,2)))
         if 'ANALYSISDEC' in new_f[i]:
             dec = '-' if 'dec' not in analysis.keys() else '{:+.2f}'.format(analysis['dec'] * 180. / np.pi)
             new_f[i] = new_f[i].replace('ANALYSISDEC', dec)
@@ -286,55 +287,80 @@ def updateFastResponsePlots(gw=False):
     plt.savefig(pval_dist_path, dpi=200, bbox_inches='tight')
     #plt.savefig(f'/home/{username}/public_html/FastResponse/webpage/output/pvalue_distribution_liveupdate.png', dpi=200, bbox_inches='tight')
 
-def updateGW_public(analysis):
+def updateGW_public(analysis, circular = None):
     r''' Push information from this analysis to summary tables
    on GW PUBLIC followup webpage
     Parameters:
     -----------
     analysis: dictionary of results from UML and LLAMA analyses
+    circular: GCN circular number, to link to circular rather than notice
     '''
-    duration = (Time(analysis['t_stop'], format='iso').mjd - Time(analysis['t_start'],format = 'iso').mjd)*86400.
+    #duration = (Time(analysis['observation_stop'], format='iso').mjd - Time(analysis['observation_start'],format = 'iso').mjd)*86400.
+    duration = analysis['observation_livetime']
 
     if 'analysisid' not in analysis.keys():
-        analysis['analysisid'] = analysis['gw_event_name']+'-'+str(analysis['gw_map_num'])
+        start_date = Time(dateutil.parser.parse(analysis['observation_start'])).datetime
+        start_str = f'{start_date.year:02d}_' \
+            + f'{start_date.month:02d}_{start_date.day:02d}'
+        analysis['analysisid'] = start_str+'_'+analysis['ref_id']
+        analysis['name'] = analysis['ref_id']
 
     if 'test' in analysis['analysisid']:
-        analysis['name'] = analysis['analysisid'].split('_')[-2]+'_test'
-    else:
-        analysis['name'] = analysis['analysisid'].split('_')[-1]
+        analysis['name'] = analysis['name']+'_test'
 
-    if analysis['n_events_coinc'] == 0:
+    subprocess.call(['cp',
+        '/home/followup/lvk_followup_output/{}_collected_results.json'.format(analysis['name']),
+        f'/home/{username}/public_html/public_FRA/gw-webpage/output/'])
+
+    if analysis['n_events_coincident'] == 0:
         extra_info = '-'
     else:
+        outdir = os.path.join(os.environ.get('FAST_RESPONSE_OUTPUT'),analysis['analysisid'])
+        try:
+            if not analysis['subthreshold'] and analysis['pval_generic']<0.1:
+                subprocess.call(['cp', '{}/{}unblinded_skymap_zoom.png'.format(outdir,analysis['analysisid']),
+                    f'/home/{username}/public_html/public_FRA/gw-webpage/output/'])
+        except:
+            print('Failed to copy skymap and GCN Notice')
         createGWEventPage(analysis)
-        extra_info = '<a href=./output/{}.html>here</a>'.format(analysis['analysisid'])
+        extra_info = '<a href="./output/{}.html">here</a>'.format(analysis['analysisid'])
 
-    #### TODO: fix IceCube GCN line here
     if 'skymap_path' not in analysis.keys():
         analysis['skymap_path'] = ''
+    if 'subthreshold' not in analysis.keys():
+        analysis['subthreshold']=False
+    if circular is not None:
+        link = '<a href=https://gcn.gsfc.nasa.gov/gcn3/{}.gcn3>GCN Circular</a>'.format(circular)
+    else:
+        link = '<a href=./output/{}_collected_results.json>GCN Notice</a>'.format(analysis['name'])
+
+    if analysis['subthreshold']:
+        row_start = '<tr style="color:gray;">'
+    else:
+        row_start = '<tr>'
 
     tag = '''
-    <tr>
+    {}
     <td>{}</td>
     <td>{}</td>
    <td><a href=https://gracedb.ligo.org/superevents/{}/>LVK GCN</a></td>
-    <td><a href={}>GW skymap</a></td>
-    <td>IceCube GCN</td>
+    <td>{}</td>
     <td>{:.2e}</td>
     <td>{}</td>
     <td>[{:.3f}, {:.3f}]</td>
     <td>{}</td>
     </tr>
-    '''.format(analysis['name'], analysis['t_merger'],
-        analysis['gw_event_name'].split('-')[0], analysis['skymap_path'],
-        duration, str(analysis['n_events_coinc']), analysis['sens_low'],
-        analysis['sens_high'], extra_info)
+    '''.format(row_start,analysis['name'], analysis['trigger_time'][:-1].replace('T',' '),
+        analysis['ref_id'].split('-')[0], link, duration,
+        str(analysis['n_events_coincident']),
+        analysis["neutrino_flux_sensitivity_range"]['flux_sensitivity'][0],
+        analysis["neutrino_flux_sensitivity_range"]['flux_sensitivity'][1], extra_info)
 
     with open(f"/home/{username}/public_html/public_FRA/gw-webpage/index.html", "r") as f:
         lines = f.readlines()
     ind = None
     for i in range(len(lines)):
-        if '<th>Event Name</th><th>Merger Time (ISO)</th>' in lines[i]:
+        if '</thead>' in lines[i]:
             ind = i
 
     lines[ind+1:ind+1] = [t + '\n' for t in tag.split('\n')]
     with open(f"/home/{username}/public_html/public_FRA/gw-webpage/index.html", 'w') as f:
         f.writelines(lines)
@@ -363,30 +389,46 @@ def createGWEventPage(analysis):
         if 'ANALYSISCREATIONTIME' in new_f[i]:
             new_f[i] = new_f[i].replace('ANALYSISCREATIONTIME', str(datetime.datetime.utcnow())[:-7])
         if 'ANALYSISSTART' in new_f[i]:
-            new_f[i] = new_f[i].replace('ANALYSISSTART', analysis['t_start'])
+            new_f[i] = new_f[i].replace('ANALYSISSTART', analysis['observation_start'])
         if 'ANALYSISSTOP' in new_f[i]:
-            new_f[i] = new_f[i].replace('ANALYSISSTOP', analysis['t_stop'])
+            new_f[i] = new_f[i].replace('ANALYSISSTOP', analysis['observation_stop'])
         if 'ANALYSISNAME' in new_f[i]:
             new_f[i] = new_f[i].replace('ANALYSISNAME', analysis['name'])
         if 'ANALYSISID' in new_f[i]:
             new_f[i] = new_f[i].replace('ANALYSISID', analysis['analysisid'])
         if 'NEVENTS' in new_f[i]:
-            new_f[i] = new_f[i].replace('NEVENTS', str(analysis['n_events_coinc']))
+            new_f[i] = new_f[i].replace('NEVENTS', str(analysis['n_events_coincident']))
         if 'UMLPVAL' in new_f[i]:
-            new_f[i] = new_f[i].replace('UMLPVAL', '{:.2f}'.format(analysis['overall_p_gen_transient']))
-        if 'UMLSIG' in new_f[i]:
-            new_f[i] = new_f[i].replace('UMLSIG', '{:.2f}'.format(analysis['overall_sig_gen_transient']))
+            try:
+                new_f[i] = new_f[i].replace('UMLPVAL', '{}'.format(analysis['pval_generic']))
+            except:
+                new_f[i] = new_f[i].replace('UMLPVAL', 'nan')
+        #if 'UMLSIG' in new_f[i]:
+        #    new_f[i] = new_f[i].replace('UMLSIG', '{:.2f}'.format(analysis['overall_sig_gen_transient']))
         if 'LLAMAPVAL' in new_f[i]:
-            new_f[i] = new_f[i].replace('LLAMAPVAL', '{:.2f}'.format(analysis['overall_p_bayesian']))
-        if 'LLAMASIG' in new_f[i]:
-            new_f[i] = new_f[i].replace('LLAMASIG', '{:.2f}'.format(analysis['overall_sig_bayesian']))
-
+            try:
+                new_f[i] = new_f[i].replace('LLAMAPVAL', '{}'.format(analysis['pval_bayesian']))
+            except:
+                new_f[i] = new_f[i].replace('LLAMAPVAL', 'nan')
+        #if 'LLAMASIG' in new_f[i]:
+        #    new_f[i] = new_f[i].replace('LLAMASIG', '{:.2f}'.format(analysis['overall_sig_bayesian']))
+        if 'MOSTPROBDIR' in new_f[i]:
+            if 'most_likely_direction' in analysis.keys():
+                new_f[i] = new_f[i].replace('MOSTPROBDIR', 'RA: {} deg, decl.: {} deg'.format(
+                    analysis['most_likely_direction']['ra'],
+                    analysis['most_likely_direction']['dec']))
+            else:
+                new_f[i] = new_f[i].replace('MOSTPROBDIR', 'N/A')
         if '</tbody>' in new_f[i]:
             ind = i
 
     e=1
     tag=''
-    for event in analysis['coinc_events']:
+    for event in analysis['coincident_events']:
+        if 'event_pval_bayesian' not in event.keys():
+            event['event_pval_bayesian'] = 'nan'
+        if 'event_pval_generic' not in event.keys():
+            event['event_pval_generic'] = 'nan'
         tag += '''<table>
             <tr><th colspan="2">
            Event {}</th></tr>
             <tr>
@@ -395,22 +437,23 @@ def createGWEventPage(analysis):
             <tr><td>RA (J2000) (deg.):</td><td>{:.2f}</td></tr>
             <tr><td>Dec (J2000) (deg.):</td><td>{:.2f}</td></tr>
             <tr><td>Angular Unc (deg.):</td><td>{:.2f}</td></tr>
-            <tr><td>Event p-value (generic transient):</td><td>{:.2f}</td></tr>
-            <tr><td>Event p-value (Bayesian):</td><td>{:.2f}</td></tr>
+            <tr><td>Event p-value (generic transient):</td><td>{}</td></tr>
+            <tr><td>Event p-value (Bayesian):</td><td>{}</td></tr>
             </table>
-            '''.format(e, event['dt'], event['ra'], event['dec'], event['ang_unc'],
-                event['p_gen_transient'], event['p_bayesian'])
+            '''.format(e, event['event_dt'], event['localization']['ra'], event['localization']['dec'],
+                event['localization']['ra_uncertainty'],
+                event['event_pval_generic'], event['event_pval_bayesian'])
         e+=1
 
-    # Add skymap zoom of events
-    tag += '''
-    <a href=./{}unblinded_skymap_zoom.png>
-    <img src=./{}unblinded_skymap_zoom.png width="600"/></a>
-    '''.format(analysis['name'], analysis['name'])
+    if not analysis['subthreshold'] and analysis['pval_generic']<0.1:
+        tag2 = '''
+        <a href=./{}unblinded_skymap_zoom.png>
+        <img src=./{}unblinded_skymap_zoom.png width="600"/></a>
+        '''.format(analysis['analysisid'], analysis['analysisid'])
+        new_f[ind+1:ind+1] = [t + '\n' for t in tag2.split('\n')]
 
-    new_f[ind-1:ind-1] = [t + '\n' for t in tag.split('\n')]
+    new_f[ind:ind] = [t + '\n' for t in tag.split('\n')]
 
     webpage_path='/home/{}/public_html/public_FRA/gw-webpage/output/{}.html'.format(username, analysis['analysisid'])
     with open(webpage_path, 'w') as f:
         f.writelines(new_f)
@@ -421,7 +464,7 @@ def sync_to_roc():
     #subprocess.Popen('rsync -a /home/apizzuto/public_html/FastResponse/webpage/ apizzuto@roc.icecube.wisc.edu:/mnt/roc/www/internal/fast_response')
     env = dict(os.environ)
     subprocess.call(['rsync', '-a', f'/home/{username}/public_html/FastResponse/webpage/',
-                     '{username}@roc.icecube.wisc.edu:/mnt/roc/www/internal/fast_response'],
+                     f'{username}@roc.icecube.wisc.edu:/mnt/roc/www/internal/fast_response'],
                     env = env
                     )
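The webpage updates above all use the same idiom: read the page into a list of lines, scan for a marker line, then assign into a zero-length slice to insert without overwriting. A minimal self-contained sketch (the marker string and row content here are hypothetical, for illustration only):

    # lines as read from an HTML summary page
    lines = ['<table>\n', '</thead>\n', '<tr><td>old row</td></tr>\n', '</table>\n']

    ind = None
    for i in range(len(lines)):
        if '</thead>' in lines[i]:
            ind = i  # keeps the LAST match if the marker repeats

    new_row = '<tr><td>S230627c</td></tr>'
    # zero-length slice assignment inserts the new lines;
    # ind+1 places them directly below the marker line
    lines[ind+1:ind+1] = [t + '\n' for t in new_row.split('\n')]
    print(''.join(lines))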
diff --git a/html/gw_pub_templ_base.html b/html/gw_pub_templ_base.html
index 7057891..9ce7365 100644
--- a/html/gw_pub_templ_base.html
+++ b/html/gw_pub_templ_base.html
@@ -13,7 +13,8 @@
 
     <h2>IceCube Fast Response Analysis: ANALYSISNAME</h2>
 
     <a href="#results">Analysis Results</a>
 
-    Note: These p-values measure the consistency of the observed track-like events with the known atmospheric backgrounds for this single map (not trials corrected for multiple GW events).
+    Note: These p-values measure the consistency of the observed track-like events with the known atmospheric backgrounds for this single map (not trials corrected for multiple GW events).
+    The most probable direction is the maximum probability RA, decl. found in the generic transient search (reported when the p-value for that search is less than 10%).
 
@@ -21,10 +22,11 @@
 
     <tr><th colspan="2">Analysis Results</th></tr>
 
     <tr><td>Name:</td><td>ANALYSISNAME</td></tr>
     <tr><td>Stop time (UTC):</td><td>ANALYSISSTOP</td></tr>
     <tr><td>N Coincident Events</td><td>NEVENTS</td></tr>
     <tr><td>p-value (generic transient):</td><td>UMLPVAL</td></tr>
-    <tr><td>significance (generic transient):</td><td>UMLSIG sigma</td></tr>
     <tr><td>p-value (Bayesian):</td><td>LLAMAPVAL</td></tr>
-    <tr><td>significance (Bayesian):</td><td>LLAMASIG sigma</td></tr>
+    <tr>
+    <td>Most probable direction:</td><td>MOSTPROBDIR</td>
+    </tr>
 
     <tr><th colspan="2">Coincident Events</th></tr>
 
     </table>
@@ -38,9 +40,7 @@
 
     <h3>Coincident Events</h3>
 
-    <table>
-    <tr><td>-</td></tr>
-    </table>
+    <br>