diff --git a/sandbox_starcheck b/sandbox_starcheck index 89b9c04b..a6c5b837 100755 --- a/sandbox_starcheck +++ b/sandbox_starcheck @@ -23,12 +23,6 @@ then exit 1 fi -PERL_INLINE_DIRECTORY=`mktemp -d -t starcheck_inline.XXXXXX` || exit 1 -export PERL_INLINE_DIRECTORY perl -I ${DIR}/starcheck/src/lib ${DIR}/starcheck/src/starcheck.pl "$@" SC_STATUS=$? -if [[ -d $PERL_INLINE_DIRECTORY && $PERL_INLINE_DIRECTORY =~ .*starcheck_inline.* ]]; -then - rm -r $PERL_INLINE_DIRECTORY -fi exit $SC_STATUS diff --git a/starcheck/pcad_att_check.py b/starcheck/pcad_att_check.py index f733d370..75b1dfbb 100755 --- a/starcheck/pcad_att_check.py +++ b/starcheck/pcad_att_check.py @@ -9,10 +9,6 @@ def check_characteristics_date(ofls_characteristics_file, ref_date=None): - # de_bytetring these inputs from Perl -> Python 3 - ofls_characteristics_file = ofls_characteristics_file.decode() - if ref_date is not None: - ref_date = ref_date.decode() match = re.search(r'CHARACTERIS_(\d\d)([A-Z]{3})(\d\d)', ofls_characteristics_file) if not match: return False diff --git a/starcheck/server.py b/starcheck/server.py new file mode 100644 index 00000000..45e49f2b --- /dev/null +++ b/starcheck/server.py @@ -0,0 +1,109 @@ +import collections +import importlib +import json +import logging +import socketserver +import sys +import traceback + +from ska_helpers.logging import basic_logger + +logger = basic_logger(__name__, level="INFO") + +HOST = "localhost" +KEY = None + +func_calls = collections.Counter() + + +class PythonServer(socketserver.TCPServer): + timeout = 180 + + def handle_timeout(self) -> None: + print(f"SERVER: starcheck python server timeout after {self.timeout}s idle", + file=sys.stderr) + # self.shutdown() # DOES NOT WORK, just hangs + sys.exit(1) + + +class MyTCPHandler(socketserver.StreamRequestHandler): + """ + The request handler class for our server. + + It is instantiated once per connection to the server, and must + override the handle() method to implement communication to the + client. 
+ """ + + def handle(self): + # self.request is the TCP socket connected to the client + data = self.rfile.readline() + logger.debug(f"SERVER receive: {data.decode('utf-8')}") + + # Decode self.data from JSON + cmd = json.loads(data) + + if cmd["key"] != KEY: + logger.ERROR(f"SERVER: bad key {cmd['key']!r}") + return + + logger.debug(f"SERVER receive func: {cmd['func']}") + logger.debug(f"SERVER receive args: {cmd['args']}") + logger.debug(f"SERVER receive kwargs: {cmd['kwargs']}") + + exc = None + + # For security reasons, only allow functions in the public API of starcheck module + if cmd["func"] == "get_server_calls": + # Sort func calls by the items in the counter + result = dict(func_calls) + else: + func_calls[cmd["func"]] += 1 + parts = cmd["func"].split(".") + package = ".".join(["starcheck"] + parts[:-1]) + func = parts[-1] + module = importlib.import_module(package) + func = getattr(module, func) + args = cmd["args"] + kwargs = cmd["kwargs"] + + try: + result = func(*args, **kwargs) + except Exception: + result = None + exc = traceback.format_exc() + + resp = json.dumps({"result": result, "exception": exc}) + logger.debug(f"SERVER send: {resp}") + + self.request.sendall(resp.encode("utf-8")) + + +def main(): + global KEY + + # Read a line from STDIN + port = int(sys.stdin.readline().strip()) + KEY = sys.stdin.readline().strip() + loglevel = sys.stdin.readline().strip() + + logmap = {'0': logging.CRITICAL, + '1': logging.WARNING, + '2': logging.INFO} + if loglevel in logmap: + logger.setLevel(logmap[loglevel]) + if int(loglevel) > 2: + logger.setLevel(logging.DEBUG) + + logger.info(f"SERVER: starting on port {port}") + + # Create the server, binding to localhost on supplied port + with PythonServer((HOST, port), MyTCPHandler) as server: + # Activate the server; this will keep running until you + # interrupt the program with Ctrl-C + while True: + server.handle_request() + + +if __name__ == "__main__": + main() diff --git a/starcheck/src/lib/Ska/Parse_CM_File.pm b/starcheck/src/lib/Ska/Parse_CM_File.pm index b8b9d937..2d2e80db 100644 --- a/starcheck/src/lib/Ska/Parse_CM_File.pm +++ b/starcheck/src/lib/Ska/Parse_CM_File.pm @@ -3,7 +3,7 @@ package Ska::Parse_CM_File; ############################################################### # -# Parse one of several types of files produced in OFLS +# Parse one of several types of files produced in OFLS # command management # # Part of the starcheck cvs project @@ -11,18 +11,14 @@ package Ska::Parse_CM_File; ############################################################### use strict; -use warnings; +use warnings; use POSIX qw( ceil); use IO::All; use Carp; +use Ska::Starcheck::Python qw(date2time time2date); -use Inline Python => q{ - -from starcheck.utils import date2time, time2date - -}; my $VERSION = '$Id$'; # ' 1; @@ -69,11 +65,11 @@ sub TLR_load_segments{ } - + return @segment_times; } - + ############################################################### @@ -103,39 +99,39 @@ sub dither { 'DITPAR' => undef); my @dh_state; - my @dh_time; my @dh_params; - # First get everything from DITHER + # First get everything from DITHER history. # Parse lines like: # 2002262.094827395 | DSDITH AODSDITH # 2002262.095427395 | ENDITH AOENDITH - my $dith_hist_fh = IO::File->new($dh_file, "r") or $dither_error = "Dither history file read err"; - while (<$dith_hist_fh>) { + # Read the last 1000 bytes in the $dh_file (guaranteed to be enough). 
+ my $dith_hist_fh = IO::File->new($dh_file, "r") or croak "Can't open $dh_file: $!"; + $dith_hist_fh->seek(-1000, 2); + my $dh_date; + my $dh_state; + while (<$dith_hist_fh>) { if (/(\d\d\d\d)(\d\d\d)\.(\d\d)(\d\d)(\d\d) \d* \s+ \| \s+ (ENDITH|DSDITH)/x) { - my ($yr, $doy, $hr, $min, $sec, $state) = ($1,$2,$3,$4,$5,$6); - my $time = date2time("$yr:$doy:$hr:$min:$sec"); - push @dh_state, $dith_enab_cmd_map{$state}; - push @dh_time, $time; - push @dh_params, {}; - } - } + $dh_date = "$1:$2:$3:$4:$5"; + $dh_state = $6; + } + } $dith_hist_fh->close(); if (not defined $dither_error){ # If the most recent/last entry in the dither file has a timestamp newer than - # the first entry in the load, return string error and undef dither history. - if ($dh_time[-1] >= $bs_arr->[0]->{time}){ - $dither_error = "Dither history runs into load\n"; + # the first entry in the load, update the dither_error var. + if ($dh_date ge $bs_arr->[0]->{date}){ + $dither_error = "Dither history runs into load\n"; } # Confirm that last state matches kadi continuity ENAB/DISA. - # Otherwise, return string error and undef dither history. - if ($kadi_dither->{'dither'} ne $dh_state[-1]){ - $dither_error = "Dither status in kadi commands does not match DITHER history\n" - . sprintf("kadi '%s' ; History '%s' \n", - $kadi_dither->{'dither'}, $dh_state[-1]); + if ($kadi_dither->{'dither'} ne $dith_enab_cmd_map{$dh_state}){ + $dither_error .= "Dither status in kadi commands does not match DITHER history\n" + . sprintf("kadi '%s' ; History '%s' \n", + $kadi_dither->{'dither'}, + $dith_enab_cmd_map{$dh_state}); } } @@ -201,7 +197,7 @@ sub dither { ############################################################### -sub radmon { +sub radmon { ############################################################### my $h_file = shift; # Radmon history file name my $bs_arr = shift; # Backstop array reference @@ -213,7 +209,7 @@ sub radmon { my @h_state; my @h_time; my @h_date; - my %cmd = ('DS' => 'DISA', + my %cmd = ('DS' => 'DISA', 'EN' => 'ENAB'); my %obs; # First get everything from backstop @@ -228,23 +224,25 @@ sub radmon { } } } - + # Now get everything from RADMON.txt # Parse lines like: # 2012222.011426269 | ENAB OORMPEN # 2012224.051225059 | DISA OORMPDS - my $hist_fh = IO::File->new($h_file, "r") or return (undef, undef); + my $hist_fh = IO::File->new($h_file, "r") or croak "Can't open $h_file: $!"; + # Get the last 1000 characters. This is guaranteed to get the most recent + # entries that we care about. + $hist_fh->seek(-1000, 2); while (<$hist_fh>) { - if (/(\d\d\d\d)(\d\d\d)\.(\d\d)(\d\d)(\d\d) \d* \s+ \| \s+ (DISA|ENAB) \s+ (OORMPDS|OORMPEN)/x) { - my ($yr, $doy, $hr, $min, $sec, $state) = ($1,$2,$3,$4,$5,$6); - my $time = date2time("$yr:$doy:$hr:$min:$sec"); - my $date = "$yr:$doy:$hr:$min:$sec"; - push @h_date, $date; - push @h_state, $state; - push @h_time, $time; + if (/(\d\d\d\d)(\d\d\d)\.(\d\d)(\d\d)(\d\d) \d* \s+ \| \s+ (DISA|ENAB) \s+ (OORMPDS|OORMPEN)/x) { + my ($yr, $doy, $hr, $min, $sec, $state) = ($1,$2,$3,$4,$5,$6); + my $date = "$yr:$doy:$hr:$min:$sec"; + push @h_date, $date; + push @h_state, $state; } } $hist_fh->close(); + @h_time = @{date2time(\@h_date)}; my @ok = grep { $h_time[$_] < $bs_arr->[0]->{time} } (0 .. $#h_time); my @state = (@h_state[@ok], @bs_state); @@ -279,23 +277,23 @@ sub fidsel { my $bs = shift; # Reference to backstop array my $error = []; my %time_hash = (); # Hash of time stamps of fid cmds - + my @fs = (); foreach (0 .. 
14) { $fs[$_] = []; } - + my ($actions, $times, $fid_time_violation) = get_fid_actions($fidsel_file, $bs); - + # Check for duplicate commanding map { $time_hash{$_}++ } @{$times}; # foreach (sort keys %time_hash) { # push @{$error}, "ERROR - $time_hash{$_} fid hardware commands at time $_\n" # if ($time_hash{$_} > 1); # } - + for (my $i = 0; $i <= $#{$times}; $i++) { - # If command contains RESET, then turn off (i.e. set tstop) any + # If command contains RESET, then turn off (i.e. set tstop) any # fid light that is on if ($actions->[$i] =~ /RESET/) { foreach my $fid (1 .. 14) { @@ -311,9 +309,9 @@ sub fidsel { push @{$error}, "Parse_cm_file::fidsel: WARNING - Could not parse $actions->[$i]"; } } - + return ($fid_time_violation, $error, \@fs); -} +} ############################################################### sub get_fid_actions { @@ -325,19 +323,20 @@ sub get_fid_actions { my @bs_time; my @fs_action; my @fs_time; - my $fidsel_fh; + my @fs_date; + my $fidsel_fh; # First get everything from backstop foreach $bs (@{$bs_arr}) { - if ($bs->{cmd} eq 'COMMAND_HW') { - my %params = %{$bs->{command}}; - if ($params{TLMSID} eq 'AFIDP') { - my $msid = $params{MSID}; - push @bs_action, "$msid FID $1 ON" if ($msid =~ /AFLC(\d+)/); - push @bs_action, "RESET" if ($msid =~ /AFLCRSET/); - push @bs_time, $bs->{time} - 10; # see comment below about timing - } - } + if ($bs->{cmd} eq 'COMMAND_HW') { + my %params = %{$bs->{command}}; + if ($params{TLMSID} eq 'AFIDP') { + my $msid = $params{MSID}; + push @bs_action, "$msid FID $1 ON" if ($msid =~ /AFLC(\d+)/); + push @bs_action, "RESET" if ($msid =~ /AFLCRSET/); + push @bs_time, $bs->{time} - 10; # see comment below about timing + } + } } # printf("first bs entry at %s, last entry at %s \n", $bs_time[0], $bs_time[-1]); @@ -347,30 +346,25 @@ sub get_fid_actions { # 2001211.190730558 | AFLCRSET RESET # 2001211.190731558 | AFLC02D1 FID 02 ON if (defined $fs_file){ - my @fidsel_text = io($fs_file)->slurp; - # take the last thousand entries if there are more than a 1000 - my @reduced_fidsel_text = @fidsel_text; - if ($#fidsel_text > 1000){ - @reduced_fidsel_text = @fidsel_text[($#fidsel_text-1000) ... $#fidsel_text]; - } -# if ($fs_file && ($fidsel_fh = new IO::File $fs_file, "r")) { - for my $fidsel_line (@reduced_fidsel_text){ - if ($fidsel_line =~ /(\d\d\d\d)(\d\d\d)\.(\d\d)(\d\d)(\d\d)\S*\s+\|\s+(AFL.+)/) { - my ($yr, $doy, $hr, $min, $sec, $action) = ($1,$2,$3,$4,$5,$6); - my $time = date2time("$yr:$doy:$hr:$min:$sec") - 10; - - if ($action =~ /(RESET|FID.+ON)/) { - # Convert to time, and subtract 10 seconds so that fid lights are - # on slightly before end of manuever. In actual commanding, they - # come on about 1-2 seconds *after*. - - push @fs_action, $action; - push @fs_time, $time; - } - } - } - -# $fidsel_fh->close(); + my $fh = IO::File->new($fs_file, "r") or croak "Can't open $fs_file: $!"; + # Last 1000 bytes of history file is sufficient + $fh->seek(-1000, 2); + while (<$fh>) { + if (/(\d\d\d\d)(\d\d\d)\.(\d\d)(\d\d)(\d\d)\S*\s+\|\s+(AFL.+)/) { + my ($yr, $doy, $hr, $min, $sec, $action) = ($1,$2,$3,$4,$5,$6); + if ($action =~ /(RESET|FID.+ON)/) { + push @fs_action, $action; + push @fs_date, "$yr:$doy:$hr:$min:$sec"; + } + } + } + + # Convert to time, and subtract 10 seconds so that fid lights are on + # slightly before end of manuever. In actual commanding, they come on about + # 1-2 seconds *after*. 
+ my $times = date2time(\@fs_date); + @fs_time = map { $_ - 10 } @{$times}; + $fh->close(); } # printf("count of fid entries is %s \n", scalar(@fs_time)); @@ -384,9 +378,9 @@ sub get_fid_actions { # if the fid history extends into the current load if ($fs_time[-1] >= $bs_arr->[0]->{time}){ - $fid_time_violation = 1; + $fid_time_violation = 1; } - + return (\@action, \@time, $fid_time_violation); } @@ -444,21 +438,27 @@ sub backstop { my $backstop = shift; my @bs = (); + my @dates = (); open (my $BACKSTOP, $backstop) || die "Couldn't open backstop file $backstop for reading\n"; while (<$BACKSTOP>) { - my ($date, $vcdu, $cmd, $params) = split '\s*\|\s*', $_; - $vcdu =~ s/ +.*//; # Get rid of second field in vcdu - my %command = parse_params($params); - push @bs, { date => $date, + my ($date, $vcdu, $cmd, $params) = split '\s*\|\s*', $_; + $vcdu =~ s/ +.*//; # Get rid of second field in vcdu + my %command = parse_params($params); + push @bs, { date => $date, vcdu => $vcdu, cmd => $cmd, params => $params, - time => date2time($date), command => \%command, }; + push @dates, $date; } close $BACKSTOP; - + + my $times = date2time(\@dates); + for (my $i = 0; $i <= $#bs; $i++) { + $bs[$i]->{time} = $times->[$i]; + } + return @bs; } @@ -502,8 +502,8 @@ sub DOT { %{$dot{$_}} = parse_params($command{$_}); $dot{$_}{time} = date2time($dot{$_}{TIME}) if ($dot{$_}{TIME}); - # MANSTART is in the dot as a "relative" time like "000:00:00:00.000", so just pass it - # to the rel_date2time routine designed to handle that. + # MANSTART is in the dot as a "relative" time like "000:00:00:00.000", so just pass it + # to the rel_date2time routine designed to handle that. $dot{$_}{time} += rel_date2time($dot{$_}{MANSTART}) if ($dot{$_}{TIME} && $dot{$_}{MANSTART}); $dot{$_}{cmd_identifier} = "$dot{$_}{anon_param1}_$dot{$_}{anon_param2}" if ($dot{$_}{anon_param1} and $dot{$_}{anon_param2}); @@ -531,32 +531,32 @@ sub guide{ # return hash that contains # target obsid, target dec, target ra, target roll # and an array of the lines of the catalog info - + my $guide_file = shift; my %guidesumm; # Let's slurp the file instead of reading it a line at a time my $whole_guide_file = io($guide_file)->slurp; - + # And then, let's split that file into chunks by processing request # By chunking I can guarantee that an error parsing the ID doesn't cause the # script to blindly overwrite the RA and DEC and keep adding to the starcat.. my @file_chunk = split /\n\n\n\*\*\*\* PROCESSING REQUEST \*\*\*\*\n/, $whole_guide_file; - + # Skip the first block in the file (which has no catalog) by using the index 1-end for my $chunk_number (1 .. $#file_chunk){ - + # Then, for each chunk, split into a line array my @file_chunk_lines = split /\n/, $file_chunk[$chunk_number]; # Now, since my loop is chunk by chunk, I can clear these for every chunk. 
my ($ra, $dec, $roll); my ($oflsid, $gsumid); - + foreach my $line (@file_chunk_lines){ - + # Look for an obsid, ra, dec, or roll if ($line =~ /\s+ID:\s+([[:ascii:]]{5})\s+\((\S{3,5})\)/) { my @field = ($1, $2); @@ -568,16 +568,16 @@ sub guide{ ($oflsid = $1) =~ s/^0*//; $oflsid =~ s/00$//; } - + # Skip the rest of the block for each line if # oflsid hasn't been found/defined - + next unless (defined $oflsid); - + if ($line =~ /\s+RA:\s*([^ ]+) DEG/){ $ra = $1; $guidesumm{$oflsid}{ra}=$ra; - } + } if ($line =~ /\s+DEC:\s*([^ ]+) DEG/){ $dec = $1; $guidesumm{$oflsid}{dec}=$dec; @@ -586,10 +586,10 @@ sub guide{ $roll = $1; $guidesumm{$oflsid}{roll}=$roll; } - + if ($line =~ /^(FID|ACQ|GUI|BOT)/) { push @{$guidesumm{$oflsid}{info}}, $line; - + } if ($line =~ /^MON/){ my @l= split ' ', $line; @@ -602,9 +602,9 @@ sub guide{ } } } - + return %guidesumm; - + } @@ -615,10 +615,10 @@ sub OR { my %or; my %obs; my $obs; - - + + open (my $OR, $or_file) || die "Couldn't open OR file $or_file\n"; - my $in_obs_statement = 0; + my $in_obs_statement = 0; while (<$OR>) { chomp; if ($in_obs_statement) { @@ -649,7 +649,7 @@ sub OR_parse_obs { my %obs = (); # print STDERR "In OR_Parse_obs \n"; foreach (@obs_columns) { - $obs{$_} = ''; + $obs{$_} = ''; } ($obs{TARGET_RA}, $obs{TARGET_DEC}) = (0.0, 0.0); ($obs{TARGET_OFFSET_Y}, $obs{TARGET_OFFSET_Z}) = (0.0, 0.0); @@ -687,7 +687,7 @@ sub OR_parse_obs { ############################################################### sub PS { # Parse processing summary -# Actually, just read in the juicy lines in the middle +# Actually, just read in the juicy lines in the middle # which are maneuvers or observations and store them # to a line array ############################################################### @@ -779,10 +779,10 @@ sub MM { $manvr_hash{tstop} = date2time($manvr_hash{stop_date}); # let's just add those 10 seconds to the summary tstart so it lines up with - # AOMANUVR in backstop + # AOMANUVR in backstop $manvr_hash{tstart} += $manvr_offset; $manvr_hash{start_date} = time2date($manvr_hash{tstart}); - + # clean up obsids (remove prepended 0s) if (defined $manvr_hash{initial_obsid}) { $manvr_hash{initial_obsid} =~ s/^0+//; @@ -806,7 +806,7 @@ sub MM { } # create a manvr_dest key to record the eventual destination of - # all manvrs. + # all manvrs. for my $i (0 .. $#mm_array){ # by default the destination is just the final_obsid $mm_array[$i]->{manvr_dest} = $mm_array[$i]->{final_obsid}; @@ -885,7 +885,7 @@ sub mechcheck { $evt{val} = $2; $evt{from}= $1; } - + push @mc, { %evt } if ($evt{var}); } close $MC; @@ -901,15 +901,15 @@ sub SOE { # read the SOE record formats into the hashes of lists $fld and $len my (%fld, %len, $obsidx); - my (%rlen, %templ); - while () { + my (%rlen, %templ); + while () { my ($rtype,$rfld,$rlen,$rdim1,$rdim2) = split; - for my $j (0 .. $rdim2-1) { + for my $j (0 .. $rdim2-1) { for my $i (0 .. 
$rdim1-1) { my $idx = ''; $idx .= "[$i]" if ($rdim1 > 1); $idx .= "[$j]" if ($rdim2 > 1); - push @{ $fld{$rtype} },$rfld.$idx; + push @{ $fld{$rtype} },$rfld.$idx; push @{ $len{$rtype} },$rlen; } } @@ -947,7 +947,7 @@ sub SOE { die "Parse_CM_File::SOE: Cannot identify record of type $typ\n "; } } - + return %SOE; } @@ -983,7 +983,7 @@ sub odb { close $ODB; return (%odb); -} +} diff --git a/starcheck/src/lib/Ska/Starcheck/Obsid.pm b/starcheck/src/lib/Ska/Starcheck/Obsid.pm index 20803f20..72f2c713 100644 --- a/starcheck/src/lib/Ska/Starcheck/Obsid.pm +++ b/starcheck/src/lib/Ska/Starcheck/Obsid.pm @@ -20,214 +20,7 @@ package Ska::Starcheck::Obsid; use strict; use warnings; - -use Inline Python => q{ -import numpy as np -from astropy.table import Table - -from starcheck.utils import time2date, date2time, de_bytestr -from mica.archive import aca_dark -from chandra_aca.star_probs import guide_count -from chandra_aca.transform import (yagzag_to_pixels, pixels_to_yagzag, - count_rate_to_mag, mag_to_count_rate) -import Quaternion -from Ska.quatutil import radec2yagzag -import agasc - -from proseco.core import ACABox -from proseco.catalog import get_effective_t_ccd -from proseco.guide import get_imposter_mags -import proseco.characteristics as char - -import mica.stats.acq_stats -import mica.stats.guide_stats - -ACQS = mica.stats.acq_stats.get_stats() -GUIDES = mica.stats.guide_stats.get_stats() - -def _get_aca_limits(): - return float(char.aca_t_ccd_planning_limit), float(char.aca_t_ccd_penalty_limit) - -def _pixels_to_yagzag(i, j): - """ - Call chandra_aca.transform.pixels_to_yagzag. - This wrapper is set to pass allow_bad=True, as exceptions from the Python side - in this case would not be helpful, and the very small bad pixel list should be - on the CCD. - :params i: pixel row - :params j: pixel col - :returns tuple: yag, zag as floats - """ - yag, zag = pixels_to_yagzag(i, j, allow_bad=True) - return float(yag), float(zag) - - -def _yagzag_to_pixels(yag, zag): - """ - Call chandra_aca.transform.yagzag_to_pixels. - This wrapper is set to pass allow_bad=True, as exceptions from the Python side - in this case would not be helpful, and the boundary checks and such will work fine - on the Perl side even if the returned row/col is off the CCD. - :params yag: y-angle arcsecs (hopefully as a number from the Perl) - :params zag: z-angle arcsecs (hopefully as a number from the Perl) - :returns tuple: row, col as floats - """ - row, col = yagzag_to_pixels(yag, zag, allow_bad=True) - return float(row), float(col) - - -def _guide_count(mags, t_ccd, count_9th=False): - eff_t_ccd = get_effective_t_ccd(t_ccd) - return float(guide_count(np.array(mags), eff_t_ccd, count_9th)) - -def check_hot_pix(idxs, yags, zags, mags, types, t_ccd, date, dither_y, dither_z): - """ - Return a list of info to make warnings on guide stars or fid lights with local dark map that gives an - 'imposter_mag' that could perturb a centroid. The potential worst-case offsets (ignoring effects - at the background pixels) are returned and checking against offset limits needs to be done - from calling code. - - This fetches the dark current before the date of the observation and passes it to - proseco.get_imposter_mags with the star candidate positions to fetch the brightest - 2x2 for each and calculates the mag for that region. The worse case offset is then - added to an entry for the star index. 
- - :param idxs: catalog indexes as list or array - :param yags: catalog yangs as list or array - :param zags: catalog zangs as list or array - :param mags: catalog mags (AGASC mags for stars estimated fid mags for fids) list or array - :param types: catalog TYPE (ACQ|BOT|FID|MON|GUI) as list or array - :param t_ccd: observation t_ccd in deg C (should be max t_ccd in guide phase) - :param date: observation date (bytestring via Inline) - :param dither_y: dither_y in arcsecs (guide dither) - :param dither_z: dither_z in arcsecs (guide dither) - :param yellow_lim: yellow limit centroid offset threshold limit (in arcsecs) - :param red_lim: red limit centroid offset threshold limit (in arcsecs) - :return imposters: list of dictionaries with keys that define the index, the imposter mag, - a 'status' key that has value 0 if the code to get the imposter mag ran successfully, - calculated centroid offset, and star or fid info to make a warning. - """ - - types = [t.decode('ascii') for t in types] - date = date.decode('ascii') - eff_t_ccd = get_effective_t_ccd(t_ccd) - - dark = aca_dark.get_dark_cal_image(date=date, t_ccd_ref=eff_t_ccd, aca_image=True) - - - def imposter_offset(cand_mag, imposter_mag): - """ - For a given candidate star and the pseudomagnitude of the brightest 2x2 imposter - calculate the max offset of the imposter counts are at the edge of the 6x6 - (as if they were in one pixel). This is somewhat the inverse of proseco.get_pixmag_for_offset - """ - cand_counts = mag_to_count_rate(cand_mag) - spoil_counts = mag_to_count_rate(imposter_mag) - return spoil_counts * 3 * 5 / (spoil_counts + cand_counts) - - imposters = [] - for idx, yag, zag, mag, ctype in zip(idxs, yags, zags, mags, types): - if ctype in ['BOT', 'GUI', 'FID']: - if ctype in ['BOT', 'GUI']: - dither = ACABox((dither_y, dither_z)) - else: - dither = ACABox((5.0, 5.0)) - row, col = yagzag_to_pixels(yag, zag, allow_bad=True) - # Handle any errors in get_imposter_mags with a try/except. This doesn't - # try to pass back a message. Most likely this will only fail if the star - # or fid is completely off the CCD and will have other warning. - try: - # get_imposter_mags takes a Table of candidates as its first argument, so construct - # a single-candidate table `entries` - entries = Table([{'idx': idx, 'row': row, 'col': col, 'mag': mag, 'type': ctype}]) - imp_mags, imp_rows, imp_cols = get_imposter_mags(entries, dark, dither) - offset = imposter_offset(mag, imp_mags[0]) - imposters.append({'idx': int(idx), 'status': int(0), - 'entry_row': float(row), 'entry_col': float(col), - 'bad2_row': float(imp_rows[0]), 'bad2_col': float(imp_cols[0]), - 'bad2_mag': float(imp_mags[0]), 'offset': float(offset)}) - except: - imposters.append({'idx': int(idx), 'status': int(1)}) - return imposters - - -def _get_agasc_stars(ra, dec, roll, radius, date, agasc_file): - """ - Fetch the cone of agasc stars. Update the table with the yag and zag of each star. 
- Return as a dictionary with the agasc ids as keys and all of the values as - simple Python types (int, float) - """ - stars = agasc.get_agasc_cone(float(ra), float(dec), float(radius), date.decode('ascii'), - agasc_file.decode('ascii')) - q_aca = Quaternion.Quat([float(ra), float(dec), float(roll)]) - yags, zags = radec2yagzag(stars['RA_PMCORR'], stars['DEC_PMCORR'], q_aca) - yags *= 3600 - zags *= 3600 - stars['yang'] = yags - stars['zang'] = zags - - # Get a dictionary of the stars with the columns that are used - # This needs to be de-numpy-ified to pass back into Perl - stars_dict = {} - for star in stars: - stars_dict[str(star['AGASC_ID'])] = { - 'id': int(star['AGASC_ID']), - 'class': int(star['CLASS']), - 'ra': float(star['RA_PMCORR']), - 'dec': float(star['DEC_PMCORR']), - 'mag_aca': float(star['MAG_ACA']), - 'bv': float(star['COLOR1']), - 'color1': float(star['COLOR1']), - 'mag_aca_err': float(star['MAG_ACA_ERR']), - 'poserr': float(star['POS_ERR']), - 'yag': float(star['yang']), - 'zag': float(star['zang']), - 'aspq': int(star['ASPQ1']), - 'var': int(star['VAR']), - 'aspq1': int(star['ASPQ1'])} - - return stars_dict - - -def get_mica_star_stats(agasc_id, time): - """ - Get the acq and guide star statistics for a star before a given time. - The time filter is just there to make this play well when run in regression. - The mica acq and guide stats are fetched into globals ACQS and GUIDES - and this method just filters for the relevant ones for a star and returns - a dictionary of summarized statistics. - - :param agasc_id: agasc id of star - :param time: time used as end of range to retrieve statistics. - - :return: dictionary of stats for the observed history of the star - """ - - # Cast the inputs - time = float(time) - agasc_id = int(agasc_id) - - acqs = Table(ACQS[(ACQS['agasc_id'] == agasc_id) - & (ACQS['guide_tstart'] < time)]) - ok = acqs['img_func'] == 'star' - guides = Table(GUIDES[(GUIDES['agasc_id'] == agasc_id) - & (GUIDES['kalman_tstart'] < time)]) - mags = np.concatenate( - [acqs['mag_obs'][acqs['mag_obs'] != 0], - guides['aoacmag_mean'][guides['aoacmag_mean'] != 0]]) - - avg_mag = float(np.mean(mags)) if (len(mags) > 0) else float(13.94) - stats = {'acq': len(acqs), - 'acq_noid': int(np.count_nonzero(~ok)), - 'gui' : len(guides), - 'gui_bad': int(np.count_nonzero(guides['f_track'] < .95)), - 'gui_fail': int(np.count_nonzero(guides['f_track'] < .01)), - 'gui_obc_bad': int(np.count_nonzero(guides['f_obc_bad'] > .05)), - 'avg_mag': avg_mag} - return stats - -}; - +use Ska::Starcheck::Python qw(date2time time2date call_python); use List::Util qw(min max); use Quat; @@ -331,7 +124,8 @@ sub set_config { # Set the ACA planning (red) and penalty (yellow) limits. Under the hood # this uses proseco to get the relevant values from the current release of # the ACA thermal model in chandra_models. 
- ($config{'ccd_temp_red_limit'}, $config{'ccd_temp_yellow_limit'}) = _get_aca_limits(); + my $vals = call_python("utils._get_aca_limits"); + ($config{'ccd_temp_red_limit'}, $config{'ccd_temp_yellow_limit'}) = @$vals; } @@ -350,17 +144,14 @@ sub set_ACA_bad_pixels { my @tmp = io($pixel_file)->slurp; my @lines = grep { /^\s+(\d|-)/ } @tmp; foreach (@lines) { - my @line = split /;|,/, $_; - foreach my $i ($line[0]..$line[1]) { - foreach my $j ($line[2]..$line[3]) { - my $pixel = {'row' => $i, + my @line = split /;|,/, $_; + foreach my $i ($line[0]..$line[1]) { + foreach my $j ($line[2]..$line[3]) { + my $pixel = {'row' => $i, 'col' => $j}; - my ($yag,$zag) = _pixels_to_yagzag($i, $j); - $pixel->{yag} = $yag; - $pixel->{zag} = $zag; - push @bad_pixels, $pixel; - } - } + push @bad_pixels, $pixel; + } + } } print STDERR "Read ", ($#bad_pixels+1), " ACA bad pixels from $pixel_file\n"; @@ -1114,7 +905,8 @@ sub check_bright_perigee{ } # Pass 1 to _guide_count as third arg to use the count_9th mode - my $bright_count = sprintf("%.1f", _guide_count(\@mags, $self->{ccd_temp}, 1)); + my $bright_count = sprintf("%.1f", + call_python("utils._guide_count", [\@mags, $self->{ccd_temp}, 1])); if ($bright_count < $min_n_stars){ push @{$self->{warn}}, "$bright_count star(s) brighter than scaled 9th mag. " . "Perigee requires at least $min_n_stars\n"; @@ -1323,8 +1115,10 @@ sub check_star_catalog { } # Run the hot pixel region check on the Python side on FID|GUI|BOT - my @imposters = check_hot_pix(\@idxs, \@yags, \@zags, \@mags, \@types, - $self->{ccd_temp}, $self->{date}, $dither_guide_y, $dither_guide_z); + my @imposters = @{call_python( + "utils.check_hot_pix", + [\@idxs, \@yags, \@zags, \@mags, \@types, + $self->{ccd_temp}, $self->{date}, $dither_guide_y, $dither_guide_z]);}; # Assign warnings based on those hot pixel region checks IMPOSTER: @@ -1488,7 +1282,7 @@ sub check_star_catalog { # Star/fid outside of CCD boundaries # ACA-019 ACA-020 ACA-021 - my ($pixel_row, $pixel_col) = _yagzag_to_pixels($yag, $zag); + my ($pixel_row, $pixel_col) = @{call_python("utils._yagzag_to_pixels", [$yag, $zag])}; # Set "acq phase" dither to acq dither or 20.0 if undefined my $dither_acq_y = $self->{dither_acq}->{ampl_y} or 20.0; @@ -1644,8 +1438,8 @@ sub check_star_catalog { my @dr; if ($type =~ /GUI|BOT/){ foreach my $pixel (@bad_pixels) { - my $dy = abs($yag-$pixel->{yag}); - my $dz = abs($zag-$pixel->{zag}); + my $dy = abs($pixel_row-$pixel->{row}) * 5; + my $dz = abs($pixel_col-$pixel->{col}) * 5; my $dr = sqrt($dy**2 + $dz**2); next unless ($dz < $self->{dither_guide}->{ampl_p} + 25 and $dy < $self->{dither_guide}->{ampl_y} + 25); push @close_pixels, sprintf(" row, col (%d, %d), dy, dz (%d, %d) \n", @@ -2412,12 +2206,10 @@ sub get_agasc_stars { return unless ($c = find_command($self, "MP_TARGQUAT")); # Use Python agasc to fetch the stars into a hash - $self->{agasc_hash} = _get_agasc_stars($self->{ra}, - $self->{dec}, - $self->{roll}, - 1.3, - $self->{date}, - $agasc_file); + $self->{agasc_hash} = call_python( + "utils._get_agasc_stars", + [$self->{ra}, $self->{dec}, $self->{roll}, 1.3, $self->{date}, $agasc_file] + ); foreach my $star (values %{$self->{agasc_hash}}) { if ($star->{'mag_aca'} < -10 or $star->{'mag_aca_err'} < -10) { @@ -2542,7 +2334,7 @@ sub star_dbhist { my $obs_tstart_minus_day = $obs_tstart - 86400; - return get_mica_star_stats($star_id, $obs_tstart_minus_day); + return call_python("utils.get_mica_star_stats", [$star_id, $obs_tstart_minus_day]); } @@ -2552,7 +2344,8 @@ sub star_image_map { my 
$self = shift; my $c; return unless ($c = find_command($self, 'MP_STARCAT')); - return unless ((defined $self->{ra}) and (defined $self->{dec}) and (defined $self->{roll})); my $obsid = $self->{obsid}; + return unless ((defined $self->{ra}) and (defined $self->{dec}) and (defined $self->{roll})); + my $obsid = $self->{obsid}; # a hash of the agasc ids we want to plot my %plot_ids; @@ -2581,20 +2374,29 @@ sub star_image_map { # top left +54+39 # 2900x2900 my $pix_scale = 330 / (2900. * 2); + + # Convert all the yag/zags to pixel rows/cols + my @yags = map { $self->{agasc_hash}->{$_}->{yag} } keys %plot_ids; + my @zags = map { $self->{agasc_hash}->{$_}->{zag} } keys %plot_ids; + my ($pix_rows, $pix_cols) = @{call_python("utils._yagzag_to_pixels", [\@yags, \@zags])}; + my $map = " \n"; - for my $star_id (keys %plot_ids){ + my @star_ids = keys %plot_ids; + for my $idx (0 .. $#star_ids) { + my $star_id = $star_ids[$idx]; + my $pix_row = $pix_rows->[$idx]; + my $pix_col = $pix_cols->[$idx]; my $cat_star = $self->{agasc_hash}->{$star_id}; my $sid = $cat_star->{id}; my $yag = $cat_star->{yag}; my $zag = $cat_star->{zag}; - my ($pix_row, $pix_col) = _yagzag_to_pixels($yag, $zag); my $image_x = 54 + ((2900 - $yag) * $pix_scale); my $image_y = 39 + ((2900 - $zag) * $pix_scale); my $star = '" . sprintf("yag,zag=%.2f,%.2f
", $yag, $zag) - . "row,col=$pix_row,$pix_col
" + . sprintf("row,col=%.2f,%.2f
", $pix_row,$pix_col) . sprintf("mag_aca=%.2f
", $cat_star->{mag_aca}) . sprintf("mag_aca_err=%.2f
", $cat_star->{mag_aca_err} / 100.0) . sprintf("class=%s
", $cat_star->{class}) @@ -2669,7 +2471,7 @@ sub count_guide_stars{ push @mags, $mag; } } - return sprintf("%.1f", _guide_count(\@mags, $self->{ccd_temp})); + return sprintf("%.1f", call_python("utils._guide_count", [\@mags, $self->{ccd_temp}])); } @@ -2707,12 +2509,14 @@ sub set_ccd_temps{ # Add info for having a penalty temperature too if ($self->{ccd_temp} > $config{ccd_temp_yellow_limit}){ push @{$self->{fyi}}, sprintf("Effective guide temperature %.1f C\n", - get_effective_t_ccd($self->{ccd_temp})); + call_python("utils.get_effective_t_ccd", + [$self->{ccd_temp}])); } if ($self->{ccd_temp_acq} > $config{ccd_temp_yellow_limit}){ push @{$self->{fyi}}, sprintf("Effective acq temperature %.1f C\n", - get_effective_t_ccd($self->{ccd_temp_acq})); + call_python("utils.get_effective_t_ccd", + [$self->{ccd_temp_acq}])); } # Clip the acq ccd temperature to the calibrated range of the grid acq probability model @@ -2727,60 +2531,6 @@ sub set_ccd_temps{ } } -use Inline Python => q{ - -from proseco.catalog import get_aca_catalog - - -def proseco_probs(kwargs): - """ - Call proseco's get_acq_catalog with the parameters supplied in `kwargs` for a specific obsid catalog - and return the individual acq star probabilities, the P2 value for the catalog, and the expected - number of acq stars. - - `kwargs` will be a Perl hash converted to dict (by Inline) of the expected keyword params. These keys - must be defined: - - 'q1', 'q2', 'q3', 'q4' = the target quaternion - 'man_angle' the maneuver angle to the target quaternion in degrees. - 'acq_ids' list of acq star ids - 'halfwidths' list of acq star halfwidths in arcsecs - 't_ccd_acq' acquisition temperature in deg C - 'date' observation date (in Chandra.Time compatible format) - 'detector' science detector - 'sim_offset' SIM offset - - As these values are from a Perl hash, bytestrings will be converted by de_bytestr early in this method. - - :param kwargs: dict of expected keywords - :return tuple: (list of floats of star acq probabilties, float P2, float expected acq stars) - - """ - - kw = de_bytestr(kwargs) - args = dict(obsid=0, - att=Quaternion.normalize(kw['att']), - date=kw['date'], - n_acq=kw['n_acq'], - man_angle=kw['man_angle'], - t_ccd_acq=kw['t_ccd_acq'], - t_ccd_guide=kw['t_ccd_guide'], - dither_acq=ACABox(kw['dither_acq']), - dither_guide=ACABox(kw['dither_guide']), - include_ids_acq=kw['include_ids_acq'], - include_halfws_acq=kw['include_halfws_acq'], - detector=kw['detector'], sim_offset=kw['sim_offset'], - n_fid=0, n_guide=0, focus_offset=0) - aca = get_aca_catalog(**args) - acq_cat = aca.acqs - - # Assign the proseco probabilities back into an array. 
- p_acqs = [float(acq_cat['p_acq'][acq_cat['id'] == acq_id][0]) for acq_id in kw['include_ids_acq']] - - return p_acqs, float(-np.log10(acq_cat.calc_p_safe())), float(np.sum(p_acqs)) -}; - - ################################################################################### sub proseco_args{ ################################################################################### @@ -2890,7 +2640,7 @@ sub set_proseco_probs_and_check_P2{ if (not %{$args}){ return; } - my ($p_acqs, $P2, $expected) = proseco_probs($args); + my ($p_acqs, $P2, $expected) = @{call_python("utils.proseco_probs", [], $args)}; $P2 = sprintf("%.1f", $P2); @@ -2932,23 +2682,6 @@ sub set_proseco_probs_and_check_P2{ } - -use Inline Python => q{ - -from chandra_aca.star_probs import mag_for_p_acq - -def _mag_for_p_acq(p_acq, date, t_ccd): - """ - Call mag_for_p_acq, but cast p_acq and t_ccd as floats (may or may not be needed) and - convert date from a bytestring (from the Perl interface). - """ - eff_t_ccd = get_effective_t_ccd(t_ccd) - return mag_for_p_acq(float(p_acq), date.decode(), float(eff_t_ccd)) - -}; - - - sub set_dynamic_mag_limits{ # Use the t_ccd at time of acquistion and time to set the mag limits corresponding to the the magnitude # for a 75% acquisition succes (yellow limit) and a 50% acquisition success (red limit) @@ -2960,7 +2693,9 @@ sub set_dynamic_mag_limits{ my $t_ccd = $self->{ccd_temp_acq}; # Dynamic mag limits based on 75% and 50% chance of successful star acq # Maximum limits of 10.3 and 10.6 - $self->{mag_faint_yellow} = min(10.3, _mag_for_p_acq(0.75, $date, $t_ccd)); - $self->{mag_faint_red} = min(10.6, _mag_for_p_acq(0.5, $date, $t_ccd)); + $self->{mag_faint_yellow} = min(10.3, call_python( + "utils._mag_for_p_acq", [0.75, $date, $t_ccd])); + $self->{mag_faint_red} = min(10.6, call_python( + "utils._mag_for_p_acq", [0.5, $date, $t_ccd])); } diff --git a/starcheck/src/lib/Ska/Starcheck/Python.pm b/starcheck/src/lib/Ska/Starcheck/Python.pm new file mode 100644 index 00000000..a83363f0 --- /dev/null +++ b/starcheck/src/lib/Ska/Starcheck/Python.pm @@ -0,0 +1,103 @@ +package Ska::Starcheck::Python; + +use strict; +use warnings; +use IO::Socket; +use JSON; +use Carp qw(confess); +use Data::Dumper; + +use vars qw($VERSION @ISA @EXPORT @EXPORT_OK %EXPORT_TAGS); +require Exporter; + +our @ISA = qw(Exporter); +our @EXPORT = qw(); +our @EXPORT_OK = qw(call_python date2time time2date set_port set_key); +%EXPORT_TAGS = ( all => \@EXPORT_OK ); + +STDOUT->autoflush(1); + +$Data::Dumper::Terse = 1; + +my $HOST = "localhost"; +my $PORT = 40000; +my $KEY = "fff"; +my $VERBOSE = 1; + +sub set_port { + $PORT = shift; +} + +sub set_key { + $KEY = shift; +} + +sub set_debug { + $VERBOSE = shift; +} + +sub call_python { + my $func = shift; + my $args = shift; + my $kwargs = shift; + if (!defined $args) { $args = []; } + if (!defined $kwargs) { $kwargs = {}; } + + my $command = { + "func" => $func, + "args" => $args, + "kwargs" => $kwargs, + "key" => $KEY, + }; + my $command_json = encode_json $command; + + if ($VERBOSE gt 2){ + print STDERR "CLIENT: Sending command $command_json\n"; + } + my $handle; + my $iter = 0; + while ($iter++ < 10) { + $handle = IO::Socket::INET->new(Proto => "tcp", + PeerAddr => $HOST, + PeerPort => $PORT); + last if defined($handle); + sleep 1; + } + if (!defined($handle)) { + die "Unable to connect to port $PORT on $HOST: $!"; + } + $handle->autoflush(1); # so output gets there right away + $handle->write("$command_json\n"); + + my $response = <$handle>; + $handle->close(); + + my $data = 
decode_json $response; + if ($VERBOSE gt 2){ + print STDERR "CLIENT: Got response: $response\n"; + print STDERR Dumper($data); + } + if (defined $data->{exception}) { + my $msg = "\nPython exception:\n"; + $msg .= "command = " . Dumper($command) . "\n"; + $msg .= "$data->{exception}\n"; + Carp::confess $msg; + } + + return $data->{result}; +} + + +sub date2time { + my $date = shift; + # print "date2time: $date\n"; + return call_python("utils.date2time", [$date]); +} + + +sub time2date { + my $time = shift; + # print "time2date: $time\n"; + return call_python("utils.time2date", [$time]); +} + diff --git a/starcheck/src/starcheck b/starcheck/src/starcheck index a2a8f70b..bbc7cd1c 100755 --- a/starcheck/src/starcheck +++ b/starcheck/src/starcheck @@ -29,13 +29,7 @@ then exit 1 fi -PERL_INLINE_DIRECTORY=`mktemp -d -t starcheck_inline.XXXXXX` || exit 1 -export PERL_INLINE_DIRECTORY STARCHECK=`python -c 'import starcheck; print(starcheck.__path__[0])'` perl -I ${STARCHECK}/src/lib ${STARCHECK}/src/starcheck.pl "$@" SC_STATUS=$? -if [[ -d $PERL_INLINE_DIRECTORY && $PERL_INLINE_DIRECTORY =~ .*starcheck_inline.* ]]; -then - rm -r $PERL_INLINE_DIRECTORY -fi exit $SC_STATUS diff --git a/starcheck/src/starcheck.pl b/starcheck/src/starcheck.pl index c9a20a19..66b5f059 100755 --- a/starcheck/src/starcheck.pl +++ b/starcheck/src/starcheck.pl @@ -12,9 +12,11 @@ use strict; use warnings; +use Data::Dumper; use Getopt::Long; use IO::File; use IO::All; +use IO::Socket::INET; use Sys::Hostname; use English; use File::Basename; @@ -24,6 +26,7 @@ use PoorTextFormat; use Ska::Starcheck::Obsid; +use Ska::Starcheck::Python qw(date2time time2date call_python); use Ska::Parse_CM_File; use Carp; use YAML; @@ -32,36 +35,9 @@ use Cwd qw( abs_path ); use HTML::TableExtract; - use Carp 'verbose'; $SIG{ __DIE__ } = sub { Carp::confess( @_ )}; - -use Inline Python => q{ - -import os -import traceback - -from starcheck.pcad_att_check import check_characteristics_date -from starcheck.utils import (_make_pcad_attitude_check_report, - plot_cat_wrapper, - date2time, time2date, - config_logging, - set_kadi_scenario_default, - ccd_temp_wrapper, - starcheck_version, get_data_dir, - get_chandra_models_version, - get_dither_kadi_state, - get_run_start_time, - get_kadi_scenario, get_cheta_source, - make_ir_check_report) - -}; - - -my $version = starcheck_version(); - - # Set some global vars with directory locations my $SKA = $ENV{SKA} || '/proj/sot/ska'; @@ -75,6 +51,7 @@ fid_char => "fid_CHARACTERISTICS", verbose => 1, maude => 0, + max_obsids => 0, ); @@ -94,14 +71,49 @@ 'config_file=s', 'run_start_time=s', 'maude!', + 'max_obsids:i', ) || exit( 1 ); +usage( 1 ) + if $par{help}; + -my $Starcheck_Data = $par{sc_data} || get_data_dir(); -my $STARCHECK = $par{out} || ($par{vehicle} ? 'v_starcheck' : 'starcheck'); +my $sock = IO::Socket::INET->new( + LocalAddr => '', LocalPort => 0, Proto => 'tcp', Listen => 1); +my $server_port = $sock->sockport(); +close($sock); + +# Generate a 16-character random string of letters and numbers that gets used +# as a key to authenticate the client to the server. 
+my $server_key = join '', map +(0..9,'a'..'z','A'..'Z')[rand 62], 1..16; + +# Configure the Python interface +Ska::Starcheck::Python::set_port($server_port); +Ska::Starcheck::Python::set_key($server_key); +Ska::Starcheck::Python::set_debug($par{verbose}); +if ($par{verbose} gt 1){ + print STDERR "CLIENT: starcheck.server started on port $server_port\n"; + print STDERR "CLIENT: starcheck.server key $server_key\n"; +} +# Start a server that can call functions in the starcheck package +my $pid = open(SERVER, "| python -m starcheck.server"); +SERVER->autoflush(1); +# Send the port, key, and verbosity to the server +print SERVER "$server_port\n"; +print SERVER "$server_key\n"; +print SERVER "$par{verbose}\n"; +# DEBUG, limit number of obsids. +# Set to undef for no limit (though option defaults to 0) +my $MAX_OBSIDS = $par{max_obsids} > 0 ? $par{max_obsids} : undef; + +my $version = call_python("utils.starcheck_version"); + + +my $Starcheck_Data = $par{sc_data} || call_python("utils.get_data_dir"); +my $STARCHECK = $par{out} || ($par{vehicle} ? 'v_starcheck' : 'starcheck'); my $empty_font_start = qq{}; my $red_font_start = qq{}; @@ -110,15 +122,13 @@ my $blue_font_start = qq{}; my $font_stop = qq{}; -usage( 1 ) - if $par{help}; # kadi log levels are a little different and INFO (corresponding to the default # verbose=1) is too chatty for the default. Instead allow only verbose=0 # (CRITICAL) or verbose=2 (DEBUG). -my $kadi_verbose = $par{verbose} eq '2' ? '2' : '0'; -config_logging($STARCHECK, $kadi_verbose, "kadi"); -set_kadi_scenario_default(); +my $kadi_verbose = $par{verbose} gt 1 ? '2' : '0'; +call_python("utils.config_logging", [$STARCHECK, $kadi_verbose, "kadi"]); +call_python("utils.set_kadi_scenario_default"); # Find backstop, guide star summary, OR, and maneuver files. 
my %input_files = (); @@ -189,6 +199,8 @@ my $ACA_badpix_date; my $ACA_badpix_firstline = io($ACA_bad_pixel_file)->getline; +Ska::Starcheck::Obsid::set_config($config_ref); + if ($ACA_badpix_firstline =~ /Bad Pixel.*\d{7}\s+\d{7}\s+(\d{7}).*/ ){ $ACA_badpix_date = $1; print STDERR "Using ACABadPixel file from $ACA_badpix_date Dark Cal \n"; @@ -211,6 +223,7 @@ # First read the Backstop file, and split into components +print "Reading backstop file $backstop\n"; my @bs = Ska::Parse_CM_File::backstop($backstop); my $i = 0; @@ -223,6 +236,7 @@ } # Read DOT, which is used to figure out the Obsid for each command +print "Reading DOT file $dot_file\n"; my ($dot_ref, $dot_touched_by_sausage) = Ska::Parse_CM_File::DOT($dot_file) if ($dot_file); my %dot = %{$dot_ref}; @@ -231,23 +245,29 @@ # print STDERR "$dotkey $dot{$dotkey}{cmd_identifier} $dot{$dotkey}{anon_param3} $dot{$dotkey}{anon_param4} \n"; #} +print "Reading TLR file $tlr_file\n"; my @load_segments = Ska::Parse_CM_File::TLR_load_segments($tlr_file); +print "Reading MM file $mm_file\n"; # Read momentum management (maneuvers + SIM move) summary file my %mm = Ska::Parse_CM_File::MM({file => $mm_file, ret_type => 'hash'}) if ($mm_file); # Read maneuver management summary for handy obsid time checks +print "Reading process summary $ps_file\n"; my @ps = Ska::Parse_CM_File::PS($ps_file) if ($ps_file); # Read mech check file and parse +print "Reading mech check file $mech_file\n"; my @mc = Ska::Parse_CM_File::mechcheck($mech_file) if ($mech_file); # Read OR file and integrate into %obs +print "Reading OR file $or_file\n"; my %or = Ska::Parse_CM_File::OR($or_file) if ($or_file); # Read FIDSEL (fid light) history file and ODB (for fid # characteristics) and parse; use fid_time_violation later (when global_warn set up +print "Reading FIDSEL file $fidsel_file\n"; my ($fid_time_violation, $error, $fidsel) = Ska::Parse_CM_File::fidsel($fidsel_file, \@bs) ; map { warning("$_\n") } @{$error}; @@ -268,9 +288,9 @@ Ska::Starcheck::Obsid::set_odb(%odb); -Ska::Starcheck::Obsid::set_config($config_ref); # Read Maneuver error file containing more accurate maneuver errors +print "Reading Maneuver Error file $manerr_file\n"; my @manerr; if ($manerr_file) { @manerr = Ska::Parse_CM_File::man_err($manerr_file); @@ -282,11 +302,14 @@ # in review, and the RUNNING_LOAD_TERMINATION_TIME backstop "pseudo" command is available, that # command will be the first command ($bs[0]) and the kadi dither state will be fetched at that time. # This is expected and appropriate. -my $kadi_dither = get_dither_kadi_state($bs[0]->{date}); +print "Getting dither state from kadi at $bs[0]->{date} \n"; +my $kadi_dither = call_python("utils.get_dither_kadi_state", [$bs[0]->{date}]); # Read DITHER history file and backstop to determine expected dither state +print "Reading DITHER file $dither_file\n"; my ($dither_error, $dither) = Ska::Parse_CM_File::dither($dither_file, \@bs, $kadi_dither); +print "Reading RADMON file $radmon_file\n"; my ($radmon_time_violation, $radmon) = Ska::Parse_CM_File::radmon($radmon_file, \@bs); # if dither history runs into load or kadi mismatch @@ -339,6 +362,7 @@ my $obsid; my %obs; my @obsid_id; +my $n_obsid = 0; for my $i (0 .. $#cmd) { # Get obsid (aka ofls_id) for this cmd by matching up with corresponding # commands from DOT. 
Returns undef if it isn't "interesting" @@ -347,8 +371,9 @@ # If obsid hasn't been seen before, create obsid object unless ($obs{$obsid}) { - push @obsid_id, $obsid; - $obs{$obsid} = Ska::Starcheck::Obsid->new($obsid, $date[$i]); + push @obsid_id, $obsid; + $obs{$obsid} = Ska::Starcheck::Obsid->new($obsid, $date[$i]); + $n_obsid++; } # Add the command to the correct obs object @@ -358,6 +383,10 @@ date => $date[$i], time => $time[$i], cmd => $cmd[$i] } ); + + if (defined $MAX_OBSIDS and $n_obsid > $MAX_OBSIDS) { + last; + } } # Read guide star summary file $guide_summ. This file is the OFLS summary of @@ -394,10 +423,12 @@ } # Check that every Guide summary OFLS ID has a matching OFLS ID in DOT - -foreach my $oflsid (keys %guidesumm){ - unless (defined $obs{$oflsid}){ - warning("OFLS ID $oflsid in Guide Summ but not in DOT! \n"); +# Skip this check if developing code with MAX_OBSIDS set +if (not defined $MAX_OBSIDS){ + foreach my $oflsid (keys %guidesumm){ + unless (defined $obs{$oflsid}){ + warning("OFLS ID $oflsid in Guide Summ but not in DOT! \n"); + } } } @@ -409,8 +440,7 @@ $obs{$oflsid}->add_guide_summ($oflsid, \%guidesumm); } else { - my $obsid = $obs{$oflsid}->{obsid}; - my $cat = Ska::Starcheck::Obsid::find_command($obs{$obsid}, "MP_STARCAT"); + my $cat = Ska::Starcheck::Obsid::find_command($obs{$oflsid}, "MP_STARCAT"); if (defined $cat){ push @{$obs{$oflsid}->{warn}}, sprintf("No Guide Star Summary for obsid $obsid ($oflsid). \n"); } @@ -495,19 +525,27 @@ sub json_obsids{ # Set the thermal model run start time to be either the supplied # run_start_time or now. -my $run_start_time = get_run_start_time($par{run_start_time}, $bs[0]->{date}); +my $run_start_time = call_python( + "utils.get_run_start_time", + [$par{run_start_time}, $bs[0]->{date}] +); my $json_text = json_obsids(); my $obsid_temps; my $json_obsid_temps; -$json_obsid_temps = ccd_temp_wrapper({oflsdir=> $par{dir}, - outdir=>$STARCHECK, - json_obsids => $json_text, - orlist => $or_file, - run_start_time => $run_start_time, - verbose => $par{verbose}, - maude => $par{maude}, - }); +$json_obsid_temps = call_python( + "utils.ccd_temp_wrapper", + [], + { + oflsdir=> $par{dir}, + outdir=>$STARCHECK, + json_obsids => $json_text, + orlist => $or_file, + run_start_time => $run_start_time, + verbose => $par{verbose}, + maude => $par{maude}, + } +); # convert back from JSON outside $obsid_temps = JSON::from_json($json_obsid_temps); @@ -538,7 +576,7 @@ sub json_obsids{ starcat_time=>"$obs{$obsid}->{date}", duration=>($obs{$obsid}->{obs_tstop} - $obs{$obsid}->{obs_tstart}), outdir=>$STARCHECK, agasc_file=>$agasc_file); - plot_cat_wrapper(\%plot_args); + call_python("utils.plot_cat_wrapper", [], \%plot_args); $obs{$obsid}->{plot_file} = "$STARCHECK/stars_$obs{$obsid}->{obsid}.png"; $obs{$obsid}->{plot_field_file} = "$STARCHECK/star_view_$obs{$obsid}->{obsid}.png"; $obs{$obsid}->{compass_file} = "$STARCHECK/compass$obs{$obsid}->{obsid}.png"; @@ -578,14 +616,14 @@ sub json_obsids{ $out .= "------------ Starcheck $version -----------------\n"; $out .= " Run on $date by $ENV{USER} from $hostname\n"; $out .= " Configuration: Using AGASC at $agasc_file\n"; -my $chandra_models_version = get_chandra_models_version(); +my $chandra_models_version = call_python("utils.get_chandra_models_version"); $out .= " chandra_models version: $chandra_models_version\n"; -my $kadi_scenario = get_kadi_scenario(); +my $kadi_scenario = call_python("utils.get_kadi_scenario"); if ($kadi_scenario ne "flight") { $kadi_scenario = 
"${red_font_start}${kadi_scenario}${font_stop}"; } $out .= " Kadi scenario: $kadi_scenario\n"; -my $cheta_source = get_cheta_source(); +my $cheta_source = call_python("utils.get_cheta_source"); if ($cheta_source ne 'cxc'){ $cheta_source = "${red_font_start}${cheta_source}${font_stop}"; } @@ -631,9 +669,14 @@ sub json_obsids{ # Check for just NMAN during high IR Zone $out .= "------------ HIGH IR ZONE CHECK -----------------\n\n"; my $ir_report = "${STARCHECK}/high_ir_check.txt"; -my $ir_ok = make_ir_check_report({ - backstop_file=> $backstop, - out=> $ir_report}); +my $ir_ok = call_python( + "utils.make_ir_check_report", + [], + { + backstop_file=> $backstop, + out=> $ir_report + } +); if ($ir_ok){ $out .= "[OK] In NMAN during High IR Zones.\n"; } @@ -662,10 +705,19 @@ sub json_obsids{ } else{ my $att_report = "${STARCHECK}/pcad_att_check.txt"; - my $att_ok = _make_pcad_attitude_check_report({ - backstop_file=> $backstop, or_list_file=>$or_file, - simtrans_file=>$simtrans_file, simfocus_file=>$simfocus_file, attitude_file=>$attitude_file, - ofls_characteristics_file=>$char_file, out=>$att_report, dynamic_offsets_file=>$aimpoint_file}); + my $att_ok = call_python( + "utils._make_pcad_attitude_check_report", + [], + { + backstop_file=> $backstop, + or_list_file=>$or_file, + simtrans_file=>$simtrans_file, + simfocus_file=>$simfocus_file, + attitude_file=>$attitude_file, + ofls_characteristics_file=>$char_file, + out=>$att_report, + dynamic_offsets_file=>$aimpoint_file + }); if ($att_ok){ $out .= "[OK] Coordinates as expected.\n"; } @@ -675,7 +727,10 @@ sub json_obsids{ # Only check that characteristics file is less than 30 days old if backstop starts before 01-Aug-2016 my $CHAR_DATE_CHECK_BEFORE = '2016:214:00:00:00.000'; if ($bs[0]->{time} < date2time($CHAR_DATE_CHECK_BEFORE)){ - if (check_characteristics_date($char_file, $date[0])){ + if (call_python( + "pcad_att_check.check_characteristics_date", + [$char_file, $date[0]]) + ) { $out .= "[OK] Characteristics file newer than 30 days\n\n"; } else{ @@ -1109,8 +1164,21 @@ sub usage exit($exit) if ($exit); } - - +END { + if (defined $pid) { + if ($par{verbose} gt 1){ + my $server_calls = call_python("get_server_calls"); + # print the server_calls hash sorted by value in descending order + print("Python server calls:"); + print Dumper($server_calls); + } + if ($par{verbose} gt 1){ + print("Shutting down python starcheck server with pid=$pid\n"); + } + kill 9, $pid; # must it be 9 (SIGKILL)? + my $gone_pid = waitpid $pid, 0; # then check that it's gone + } +}; =pod @@ -1140,6 +1208,12 @@ =head1 OPTIONS Output reports will be .html, .txt. Star plots will be /stars_.png. The default is = 'STARCHECK'. +=item B<-verbose > + +Output verbosity. The default is 1. verbose=2 includes kadi logger INFO statements, +starcheck.server startup and shutdown information. verbose > 2 includes starcheck.server +full debug output. + =item B<-vehicle> Use vehicle-only products and the vehicle-only ACA checklist to perform @@ -1164,6 +1238,10 @@ =head1 OPTIONS Use MAUDE for telemetry instead of default cheta cxc archive. MAUDE will also be used if no AACCCDPT telemetry can be found in cheta archive for initial conditions. +=item B<-max_obsids > + +Limit starcheck review to first N obsids (for testing). + =item B<-agasc_file > Specify location of agasc h5 file. Default is SKA/data/agasc/agasc1p7.h5 . 
diff --git a/starcheck/utils.py b/starcheck/utils.py index 3d42537a..0a6276a5 100644 --- a/starcheck/utils.py +++ b/starcheck/utils.py @@ -1,13 +1,25 @@ -import functools import logging import os from pathlib import Path +import agasc +import cxotime +import mica.stats.acq_stats +import mica.stats.guide_stats import numpy as np import proseco.characteristics as proseco_char +import proseco.characteristics as char +import Quaternion +from astropy.table import Table from Chandra.Time import DateTime -from Chandra.Time import date2secs, secs2date +from chandra_aca.star_probs import guide_count, mag_for_p_acq +from chandra_aca.transform import mag_to_count_rate, pixels_to_yagzag, yagzag_to_pixels from kadi.commands import states +from mica.archive import aca_dark +from proseco.catalog import get_aca_catalog, get_effective_t_ccd +from proseco.core import ACABox +from proseco.guide import get_imposter_mags +from Ska.quatutil import radec2yagzag from testr import test_helper import starcheck @@ -17,119 +29,113 @@ from starcheck.pcad_att_check import make_pcad_attitude_check_report from starcheck.plot import make_plots_for_obsid +ACQS = mica.stats.acq_stats.get_stats() +GUIDES = mica.stats.guide_stats.get_stats() -def python_from_perl(func): - """Decorator to facilitate calling Python from Perl inline. - - Convert byte strings to unicode. - - Print stack trace on exceptions which perl inline suppresses. - """ - @functools.wraps(func) - def wrapper(*args, **kwargs): - args = de_bytestr(args) - kwargs = de_bytestr(kwargs) - try: - return func(*args, **kwargs) - except Exception: - import traceback - traceback.print_exc() - raise - return wrapper +def date2secs(val): + """Convert date to seconds since 1998.0""" + out = cxotime.date2secs(val) + # if isinstance(out, (np.number, np.ndarray)): + # out = out.tolist() + return out + + +def secs2date(val): + """Convert date to seconds since 1998.0""" + out = cxotime.secs2date(val) + # if isinstance(out, (np.number, np.ndarray)): + # out = out.tolist() + return out -# Borrowed from https://stackoverflow.com/a/33160507 -def de_bytestr(data): - if isinstance(data, bytes): - return data.decode() - if isinstance(data, dict): - return dict(map(de_bytestr, data.items())) - if isinstance(data, tuple): - return tuple(map(de_bytestr, data)) - if isinstance(data, list): - return list(map(de_bytestr, data)) - if isinstance(data, set): - return set(map(de_bytestr, data)) - return data +def date2time(val): + out = cxotime.date2secs(val) + if isinstance(out, (np.number, np.ndarray)): + out = out.tolist() + return out -date2time = python_from_perl(date2secs) -time2date = python_from_perl(secs2date) +def time2date(val): + out = cxotime.secs2date(val) + if isinstance(out, (np.number, np.ndarray)): + out = out.tolist() + return out -@python_from_perl -def ccd_temp_wrapper(kwargs): +def ccd_temp_wrapper(**kwargs): return get_ccd_temps(**kwargs) -@python_from_perl -def plot_cat_wrapper(kwargs): +def plot_cat_wrapper(**kwargs): return make_plots_for_obsid(**kwargs) -@python_from_perl def starcheck_version(): return version -@python_from_perl def get_chandra_models_version(): return proseco_char.chandra_models_version -@python_from_perl def set_kadi_scenario_default(): # For kadi commands v2 running on HEAD set the default scenario to flight. # This is aimed at running in production where the commands archive is # updated hourly. In this case no network resources are used. 
     if test_helper.on_head_network():
-        os.environ.setdefault('KADI_SCENARIO', 'flight')
+        os.environ.setdefault("KADI_SCENARIO", "flight")
 
 
-@python_from_perl
 def get_cheta_source():
     sources = starcheck.calc_ccd_temps.fetch.data_source.sources()
-    if len(sources) == 1 and sources[0] == 'cxc':
-        return 'cxc'
+    if len(sources) == 1 and sources[0] == "cxc":
+        return "cxc"
     else:
         return str(sources)
 
 
-@python_from_perl
 def get_kadi_scenario():
-    return os.getenv('KADI_SCENARIO', default="None")
+    return os.getenv("KADI_SCENARIO", default="None")
 
 
-@python_from_perl
 def get_data_dir():
-    sc_data = os.path.join(os.path.dirname(starcheck.__file__), 'data')
+    sc_data = os.path.join(os.path.dirname(starcheck.__file__), "data")
     return sc_data if os.path.exists(sc_data) else ""
 
 
-@python_from_perl
-def _make_pcad_attitude_check_report(kwargs):
+def _make_pcad_attitude_check_report(**kwargs):
     return make_pcad_attitude_check_report(**kwargs)
 
 
-@python_from_perl
-def make_ir_check_report(kwargs):
+def make_ir_check_report(**kwargs):
     return ir_zone_ok(**kwargs)
 
 
-@python_from_perl
 def get_dither_kadi_state(date):
-    cols = ['dither', 'dither_ampl_pitch', 'dither_ampl_yaw',
-            'dither_period_pitch', 'dither_period_yaw']
+    cols = [
+        "dither",
+        "dither_ampl_pitch",
+        "dither_ampl_yaw",
+        "dither_period_pitch",
+        "dither_period_yaw",
+    ]
     state = states.get_continuity(date, cols)
     # Cast the numpy floats as plain floats
-    for key in ['dither_ampl_pitch', 'dither_ampl_yaw',
-                'dither_period_pitch', 'dither_period_yaw']:
+    for key in [
+        "dither_ampl_pitch",
+        "dither_ampl_yaw",
+        "dither_period_pitch",
+        "dither_period_yaw",
+    ]:
         state[key] = float(state[key])
     # get most recent change time
-    state['time'] = float(np.max([DateTime(state['__dates__'][key]).secs for key in cols]))
+    state["time"] = float(
+        np.max([DateTime(state["__dates__"][key]).secs for key in cols])
+    )
     return state
 
 
-@python_from_perl
 def get_run_start_time(run_start_time, backstop_start):
     """
     Determine a reasonable reference run start time based on the supplied
@@ -166,7 +172,6 @@ def get_run_start_time(run_start_time, backstop_start):
     return ref_time.date
 
 
-@python_from_perl
 def config_logging(outdir, verbose, name):
     """Set up file and console logger. See
     http://docs.python.org/library/logging.html
@@ -178,12 +183,13 @@ def config_logging(outdir, verbose, name):
     class NullHandler(logging.Handler):
         def emit(self, record):
             pass
+
     rootlogger = logging.getLogger()
     rootlogger.addHandler(NullHandler())
 
-    loglevel = {0: logging.CRITICAL,
-                1: logging.INFO,
-                2: logging.DEBUG}.get(int(verbose), logging.INFO)
+    loglevel = {0: logging.CRITICAL, 1: logging.INFO, 2: logging.DEBUG}.get(
+        int(verbose), logging.INFO
+    )
 
     logger = logging.getLogger(name)
     logger.setLevel(loglevel)
@@ -192,7 +198,7 @@ def emit(self, record):
     for handler in list(logger.handlers):
         logger.removeHandler(handler)
 
-    formatter = logging.Formatter('%(message)s')
+    formatter = logging.Formatter("%(message)s")
 
     console = logging.StreamHandler()
     console.setFormatter(formatter)
@@ -200,6 +206,277 @@ def emit(self, record):
     outdir = Path(outdir)
     outdir.mkdir(parents=True, exist_ok=True)
 
-    filehandler = logging.FileHandler(filename=outdir / 'run.dat', mode='w')
+    filehandler = logging.FileHandler(filename=outdir / "run.dat", mode="w")
     filehandler.setFormatter(formatter)
     logger.addHandler(filehandler)
+
+
+def _get_aca_limits():
+    return float(char.aca_t_ccd_planning_limit), float(char.aca_t_ccd_penalty_limit)
+
+
+def _pixels_to_yagzag(i, j):
+    """
+    Call chandra_aca.transform.pixels_to_yagzag.
+    This wrapper is set to pass allow_bad=True, as exceptions from the Python side
+    in this case would not be helpful, and the very small bad pixel list should be
+    on the CCD.
+    :params i: pixel row
+    :params j: pixel col
+    :returns tuple: yag, zag as floats
+    """
+    yag, zag = pixels_to_yagzag(i, j, allow_bad=True)
+    # Convert to lists or floats to avoid numpy types which are not JSON serializable
+    return yag.tolist(), zag.tolist()
+
+
+def _yagzag_to_pixels(yag, zag):
+    """
+    Call chandra_aca.transform.yagzag_to_pixels.
+    This wrapper is set to pass allow_bad=True, as exceptions from the Python side
+    in this case would not be helpful, and the boundary checks and such will work fine
+    on the Perl side even if the returned row/col is off the CCD.
+    :params yag: y-angle arcsecs (hopefully as a number from the Perl)
+    :params zag: z-angle arcsecs (hopefully as a number from the Perl)
+    :returns tuple: row, col as floats
+    """
+    row, col = yagzag_to_pixels(yag, zag, allow_bad=True)
+    # Convert to lists or floats to avoid numpy types which are not JSON serializable
+    return row.tolist(), col.tolist()
+
+
+def _guide_count(mags, t_ccd, count_9th=False):
+    eff_t_ccd = get_effective_t_ccd(t_ccd)
+    return float(guide_count(np.array(mags), eff_t_ccd, count_9th))
+
+
+def check_hot_pix(idxs, yags, zags, mags, types, t_ccd, date, dither_y, dither_z):
+    """
+    Return a list of info to make warnings on guide stars or fid lights with a
+    local dark map that gives an 'imposter_mag' that could perturb a centroid.
+    The potential worst-case offsets (ignoring effects at the background pixels)
+    are returned, and checking against offset limits needs to be done from the
+    calling code.
+
+    This fetches the dark current before the date of the observation and passes
+    it to proseco.get_imposter_mags with the star candidate positions to fetch
+    the brightest 2x2 for each and calculates the mag for that region. The
+    worst-case offset is then added to an entry for the star index.
+
+    :param idxs: catalog indexes as list or array
+    :param yags: catalog yangs as list or array
+    :param zags: catalog zangs as list or array
+    :param mags: catalog mags (AGASC mags for stars, estimated fid mags for fids)
+        as list or array
+    :param types: catalog TYPE (ACQ|BOT|FID|MON|GUI) as list or array
+    :param t_ccd: observation t_ccd in deg C (should be max t_ccd in guide
+        phase)
+    :param date: observation date (str)
+    :param dither_y: dither_y in arcsecs (guide dither)
+    :param dither_z: dither_z in arcsecs (guide dither)
+    :return imposters: list of dictionaries with keys that define the index, the
+        imposter mag, a 'status' key that has value 0 if the code to get
+        the imposter mag ran successfully, the calculated centroid offset, and
+        star or fid info to make a warning.
+    """
+
+    eff_t_ccd = get_effective_t_ccd(t_ccd)
+
+    dark = aca_dark.get_dark_cal_image(date=date, t_ccd_ref=eff_t_ccd, aca_image=True)
+
+    def imposter_offset(cand_mag, imposter_mag):
+        """
+        For a given candidate star and the pseudomagnitude of the brightest 2x2
+        imposter, calculate the max offset assuming the imposter counts are at the edge
+        of the 6x6 (as if they were in one pixel).
+        This is somewhat the inverse of proseco.get_pixmag_for_offset.
+        """
+        cand_counts = mag_to_count_rate(cand_mag)
+        spoil_counts = mag_to_count_rate(imposter_mag)
+        return spoil_counts * 3 * 5 / (spoil_counts + cand_counts)
+
+    imposters = []
+    for idx, yag, zag, mag, ctype in zip(idxs, yags, zags, mags, types):
+        if ctype in ["BOT", "GUI", "FID"]:
+            if ctype in ["BOT", "GUI"]:
+                dither = ACABox((dither_y, dither_z))
+            else:
+                dither = ACABox((5.0, 5.0))
+            row, col = yagzag_to_pixels(yag, zag, allow_bad=True)
+            # Handle any errors in get_imposter_mags with a try/except. This doesn't
+            # try to pass back a message. Most likely this will only fail if the star
+            # or fid is completely off the CCD and will have other warnings.
+            try:
+                # get_imposter_mags takes a Table of candidates as its first argument, so construct
+                # a single-candidate table `entries`
+                entries = Table(
+                    [{"idx": idx, "row": row, "col": col, "mag": mag, "type": ctype}]
+                )
+                imp_mags, imp_rows, imp_cols = get_imposter_mags(entries, dark, dither)
+                offset = imposter_offset(mag, imp_mags[0])
+                imposters.append(
+                    {
+                        "idx": int(idx),
+                        "status": int(0),
+                        "entry_row": float(row),
+                        "entry_col": float(col),
+                        "bad2_row": float(imp_rows[0]),
+                        "bad2_col": float(imp_cols[0]),
+                        "bad2_mag": float(imp_mags[0]),
+                        "offset": float(offset),
+                    }
+                )
+            except Exception:
+                imposters.append({"idx": int(idx), "status": int(1)})
+    return imposters
+
+
+def _get_agasc_stars(ra, dec, roll, radius, date, agasc_file):
+    """
+    Fetch the cone of agasc stars. Update the table with the yag and zag of each star.
+    Return as a dictionary with the agasc ids as keys and all of the values as
+    simple Python types (int, float).
+    """
+    stars = agasc.get_agasc_cone(
+        float(ra),
+        float(dec),
+        float(radius),
+        date,
+        agasc_file,
+    )
+    q_aca = Quaternion.Quat([float(ra), float(dec), float(roll)])
+    yags, zags = radec2yagzag(stars["RA_PMCORR"], stars["DEC_PMCORR"], q_aca)
+    yags *= 3600
+    zags *= 3600
+    stars["yang"] = yags
+    stars["zang"] = zags
+
+    # Get a dictionary of the stars with the columns that are used.
+    # This needs to be de-numpy-ified to pass back into Perl.
+    stars_dict = {}
+    for star in stars:
+        stars_dict[str(star["AGASC_ID"])] = {
+            "id": int(star["AGASC_ID"]),
+            "class": int(star["CLASS"]),
+            "ra": float(star["RA_PMCORR"]),
+            "dec": float(star["DEC_PMCORR"]),
+            "mag_aca": float(star["MAG_ACA"]),
+            "bv": float(star["COLOR1"]),
+            "color1": float(star["COLOR1"]),
+            "mag_aca_err": float(star["MAG_ACA_ERR"]),
+            "poserr": float(star["POS_ERR"]),
+            "yag": float(star["yang"]),
+            "zag": float(star["zang"]),
+            "aspq": int(star["ASPQ1"]),
+            "var": int(star["VAR"]),
+            "aspq1": int(star["ASPQ1"]),
+        }
+
+    return stars_dict
+
+
+def get_mica_star_stats(agasc_id, time):
+    """
+    Get the acq and guide star statistics for a star before a given time.
+    The time filter is just there to make this play well when run in regression
+    testing. The mica acq and guide stats are fetched into globals ACQS and GUIDES
+    and this method just filters for the relevant ones for a star and returns
+    a dictionary of summarized statistics.
+
+    :param agasc_id: agasc id of star
+    :param time: time used as end of range to retrieve statistics.
+
+    :return: dictionary of stats for the observed history of the star
+    """
+
+    # Cast the inputs
+    time = float(time)
+    agasc_id = int(agasc_id)
+
+    acqs = Table(ACQS[(ACQS["agasc_id"] == agasc_id) & (ACQS["guide_tstart"] < time)])
+    ok = acqs["img_func"] == "star"
+    guides = Table(
+        GUIDES[(GUIDES["agasc_id"] == agasc_id) & (GUIDES["kalman_tstart"] < time)]
+    )
+    mags = np.concatenate(
+        [
+            acqs["mag_obs"][acqs["mag_obs"] != 0],
+            guides["aoacmag_mean"][guides["aoacmag_mean"] != 0],
+        ]
+    )
+
+    avg_mag = float(np.mean(mags)) if (len(mags) > 0) else float(13.94)
+    stats = {
+        "acq": len(acqs),
+        "acq_noid": int(np.count_nonzero(~ok)),
+        "gui": len(guides),
+        "gui_bad": int(np.count_nonzero(guides["f_track"] < 0.95)),
+        "gui_fail": int(np.count_nonzero(guides["f_track"] < 0.01)),
+        "gui_obc_bad": int(np.count_nonzero(guides["f_obc_bad"] > 0.05)),
+        "avg_mag": avg_mag,
+    }
+    return stats
+
+
+def _mag_for_p_acq(p_acq, date, t_ccd):
+    """
+    Call mag_for_p_acq, but cast p_acq and t_ccd as floats.
+    """
+    eff_t_ccd = get_effective_t_ccd(t_ccd)
+    return mag_for_p_acq(float(p_acq), date, float(eff_t_ccd))
+
+
+def proseco_probs(**kw):
+    """
+    Call proseco's get_aca_catalog with the parameters supplied in `kw` for
+    a specific obsid catalog and return the individual acq star probabilities,
+    the P2 value for the catalog, and the expected number of acq stars.
+
+    `kw` will be a Perl hash converted to a dict of the expected keyword
+    params. These keys must be defined:
+
+    'att' the target attitude quaternion, 'date' the observation date (in a
+    Chandra.Time compatible format), 'n_acq' the number of acq stars,
+    'man_angle' the maneuver angle to the target attitude in degrees,
+    't_ccd_acq' and 't_ccd_guide' the acquisition and guide CCD temperatures
+    in deg C, 'dither_acq' and 'dither_guide' the dither values,
+    'include_ids_acq' the list of acq star ids, 'include_halfws_acq' the list
+    of acq star halfwidths in arcsecs, 'detector' the science detector, and
+    'sim_offset' the SIM offset.
+
+    :param **kw: dict of expected keywords
+    :return tuple: (list of floats of star acq probabilities, float P2, float
+        expected acq stars)
+
+    """
+
+    args = dict(
+        obsid=0,
+        att=Quaternion.normalize(kw["att"]),
+        date=kw["date"],
+        n_acq=kw["n_acq"],
+        man_angle=kw["man_angle"],
+        t_ccd_acq=kw["t_ccd_acq"],
+        t_ccd_guide=kw["t_ccd_guide"],
+        dither_acq=ACABox(kw["dither_acq"]),
+        dither_guide=ACABox(kw["dither_guide"]),
+        include_ids_acq=kw["include_ids_acq"],
+        include_halfws_acq=kw["include_halfws_acq"],
+        detector=kw["detector"],
+        sim_offset=kw["sim_offset"],
+        n_fid=0,
+        n_guide=0,
+        focus_offset=0,
+    )
+    aca = get_aca_catalog(**args)
+    acq_cat = aca.acqs
+
+    # Assign the proseco probabilities back into an array.
+    p_acqs = [
+        float(acq_cat["p_acq"][acq_cat["id"] == acq_id][0])
+        for acq_id in kw["include_ids_acq"]
+    ]
+
+    return p_acqs, float(-np.log10(acq_cat.calc_p_safe())), float(np.sum(p_acqs))
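For reference, the three values returned by proseco_probs are related in a simple way: the expected number of acquired stars is the sum of the per-star probabilities, and P2 is -log10 of the catalog-level calc_p_safe() value. The sketch below uses invented numbers purely to illustrate that arithmetic; it does not call proseco.

    import numpy as np

    # Invented per-star acquisition probabilities, standing in for the values
    # that proseco_probs reads out of the proseco acq catalog for include_ids_acq.
    p_acqs = [0.985, 0.972, 0.940, 0.910, 0.870]

    # Expected number of acquired stars is just the sum of the probabilities.
    n_expected = float(np.sum(p_acqs))  # ~4.68

    # P2 is -log10 of calc_p_safe(), so calc_p_safe() = 0.01 corresponds to P2 = 2.0.
    p_safe = 0.001  # invented value for illustration
    p2 = float(-np.log10(p_safe))  # 3.0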